void BaseQualityEvaluator::readAllDatasetsRunParams()
{
    string filename = getRunParamsFilename();
    FileStorage fs( filename, FileStorage::READ );
    if( !fs.isOpened() )
    {
        isWriteParams = true;
        setDefaultAllDatasetsRunParams();
        printf("All runParams are default.\n");
    }
    else
    {
        isWriteParams = false;
        FileNode topfn = fs.getFirstTopLevelNode();

        FileNode fn = topfn[DEFAULT_PARAMS];
        readDefaultRunParams(fn);

        for( int i = 0; i < DATASETS_COUNT; i++ )
        {
            FileNode fn = topfn[DATASET_NAMES[i]];
            if( fn.empty() )
            {
                printf( "%d-runParams is default.\n", i );
                setDefaultDatasetRunParams(i);
            }
            else
                readDatasetRunParams(fn, i);
        }
    }
}
inline void read(const FileNode& node, params& x, const params& default_value = params())
{
    if( node.empty() )
        x = default_value;
    else
        x.read(node);
}
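The signature above (target value plus a defaulted fallback) is exactly what OpenCV's FileStorage streaming machinery looks for when deserializing user-defined types: `fs["key"] >> obj` dispatches to this free `read()` overload. A minimal usage sketch, assuming a `params` type with the member `read(const FileNode&)` used above and a hypothetical "config.yml" holding a top-level `params` map:

FileStorage fs("config.yml", FileStorage::READ);  // file name is an assumption
params p;
fs["params"] >> p;  // resolves to read(node, p, params()); falls back to params() if the node is empty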
void CameraCalibration::read( const FileNode& node, Settings& x, const Settings& default_value )
{
    if( node.empty() )
        x = default_value;
    else
        x.read( node );
}
void read(const FileNode& node, ICFDetector& d, const ICFDetector& default_value)
{
    if( node.empty() )
        d = default_value;
    else
        d.read(node);
}
void DetectorQualityEvaluator::readDefaultRunParams( FileNode &fn )
{
    if( !fn.empty() )
    {
        isSaveKeypointsDefault = (int)fn[IS_SAVE_KEYPOINTS] != 0;
        defaultDetector->read(fn);
    }
}
/// Static function that reads the settings file.
/// @param[in]  node          FileNode to read from
/// @param[out] x             Settings filled in from the FileNode
/// @param[in]  default_value value used when the FileNode is empty
static void read(const FileNode& node, Settings& x, const Settings& default_value = Settings())
{
    if( node.empty() )
        x = default_value;
    else
        x.read(node);
}
void DescriptorQualityEvaluator::readDefaultRunParams( FileNode &fn )
{
    if( !fn.empty() )
    {
        commRunParamsDefault.projectKeypointsFrom1Image = (int)fn[PROJECT_KEYPOINTS_FROM_1IMAGE] != 0;
        commRunParamsDefault.matchFilter = (int)fn[MATCH_FILTER];
        defaultDescMatcher->read(fn);
    }
}
bool CvFeatureParams::read( const FileNode &node )
{
    if( node.empty() )
        return false;
    maxCatCount = node[CC_MAX_CAT_COUNT];
    featSize = node[CC_FEATURE_SIZE];
    numFeatures = node[CC_NUM_FEATURES];
    return ( maxCatCount >= 0 && featSize >= 1 );
}
/// Static function that reads the results file.
/// @param[in]  node          FileNode to read from
/// @param[out] result        Results filled in from the FileNode
/// @param[in]  default_value value used when the FileNode is empty
static void read(const FileNode& node, Results& result, const Results& default_value = Results())
{
    if( node.empty() )
        result = default_value;
    else
        result.read(node);
}
void PCAread(const FileNode& fs, PCA& pca)
{
    CV_Assert( !fs.empty() );
    String name = (String)fs["name"];
    CV_Assert( name == "PCA" );
    cv::read(fs["vectors"], pca.eigenvectors);
    cv::read(fs["values"], pca.eigenvalues);
    cv::read(fs["mean"], pca.mean);
}
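For orientation, a hedged sketch of calling PCAread: it assumes the PCA state was previously saved as a top-level map carrying the "name", "vectors", "values" and "mean" entries that the function asserts on (the file name here is a placeholder):

FileStorage fs("pca_state.yml", FileStorage::READ);  // assumed file layout and name
PCA pca;
PCAread(fs.getFirstTopLevelNode(), pca);  // fails the CV_Assert if name != "PCA"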
void BaseHoughLineTest::run_test(int type)
{
    string filename = cvtest::TS::ptr()->get_data_path() + picture_name;
    Mat src = imread(filename, IMREAD_GRAYSCALE);
    EXPECT_FALSE(src.empty()) << "Invalid test image: " << filename;

    string xml;
    if (type == STANDART)
        xml = string(cvtest::TS::ptr()->get_data_path()) + "imgproc/HoughLines.xml";
    else if (type == PROBABILISTIC)
        xml = string(cvtest::TS::ptr()->get_data_path()) + "imgproc/HoughLinesP.xml";

    Mat dst;
    Canny(src, dst, 100, 150, 3);
    EXPECT_FALSE(dst.empty()) << "Failed Canny edge detector";

    Mat lines;
    if (type == STANDART)
        HoughLines(dst, lines, rhoStep, thetaStep, threshold, 0, 0);
    else if (type == PROBABILISTIC)
        HoughLinesP(dst, lines, rhoStep, thetaStep, threshold, minLineLength, maxGap);

    String test_case_name = format("lines_%s_%.0f_%.2f_%d_%d_%d", picture_name.c_str(),
                                   rhoStep, thetaStep, threshold, minLineLength, maxGap);
    test_case_name = getTestCaseName(test_case_name);

    FileStorage fs(xml, FileStorage::READ);
    FileNode node = fs[test_case_name];
    if (node.empty())
    {
        fs.release();
        fs.open(xml, FileStorage::APPEND);
        EXPECT_TRUE(fs.isOpened()) << "Cannot open sanity data file: " << xml;
        fs << test_case_name << lines;
        fs.release();
        fs.open(xml, FileStorage::READ);
        EXPECT_TRUE(fs.isOpened()) << "Cannot open sanity data file: " << xml;
    }

    Mat exp_lines;
    read( fs[test_case_name], exp_lines, Mat() );
    fs.release();

    int count = -1;
    if (type == STANDART)
        count = countMatIntersection<Vec2f>(exp_lines, lines, (float) thetaStep + FLT_EPSILON,
                                            (float) rhoStep + FLT_EPSILON);
    else if (type == PROBABILISTIC)
        count = countMatIntersection<Vec4i>(exp_lines, lines, 1e-4f, 0.f);

#if defined HAVE_IPP && IPP_VERSION_X100 >= 810 && IPP_DISABLE_BLOCK
    EXPECT_GE( count, (int) (exp_lines.total() * 0.8) );
#else
    EXPECT_EQ( count, (int) exp_lines.total());
#endif
}
void read( const FileNode& node, MyData& x, const MyData& default_value = MyData() )
{
    if( node.empty() )
        x = default_value;
    else
        x.read( node );
}
bool CascadeClassifier::read(const FileNode& root)
{
    if( !data.read(root) )
        return false;

    // load features
    featureEvaluator = FeatureEvaluator::create(data.featureType);
    FileNode fn = root[CC_FEATURES];
    if( fn.empty() )
        return false;

    return featureEvaluator->read(fn);
}
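A short sketch of driving this overload directly, rather than through CascadeClassifier::load; it assumes a new-format cascade file whose first top-level node is the cascade root (the path is a placeholder):

FileStorage fs("cascade.xml", FileStorage::READ);  // placeholder path
CascadeClassifier classifier;
bool ok = fs.isOpened() && classifier.read(fs.getFirstTopLevelNode());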
bool CvCascadeClassifier::readStages( const FileNode &node )
{
    FileNode rnode = node[CC_STAGES];
    // the stages node must exist and be a sequence
    if ( rnode.empty() || !rnode.isSeq() )
        return false;
    stageClassifiers.reserve(numStages);
    FileNodeIterator it = rnode.begin();
    for( int i = 0; i < min( (int)rnode.size(), numStages ); i++, it++ )
    {
        Ptr<CvCascadeBoost> tempStage = makePtr<CvCascadeBoost>();
        if ( !tempStage->read( *it, featureEvaluator, *stageParams ) )
            return false;
        stageClassifiers.push_back(tempStage);
    }
    return true;
}
void readVectorOfVector(FileStorage &fns, string name, vector<vector<KeyPoint> > &vov)
{
    vov.clear();
    FileNode fn = fns[name];
    if (fn.empty())
        return;

    // go through the sequence node, reading one vector<KeyPoint> per element
    FileNodeIterator current = fn.begin(), it_end = fn.end();
    for (; current != it_end; ++current)
    {
        vector<KeyPoint> tmp;
        FileNode item = *current;
        read(item, tmp);
        vov.push_back(tmp);
    }
}
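A plausible writer counterpart for the reader above, sketched under the assumption that streaming a vector<KeyPoint> into an open sequence node uses OpenCV's built-in KeyPoint serialization; writeVectorOfVector is a hypothetical helper, not part of the original code:

// Hypothetical counterpart to readVectorOfVector (a sketch, not the original API)
void writeVectorOfVector(FileStorage &fs, const string &name, const vector<vector<KeyPoint> > &vov)
{
    fs << name << "[";            // open a sequence node under "name"
    for (size_t i = 0; i < vov.size(); ++i)
        fs << vov[i];             // one nameless vector<KeyPoint> element per entry
    fs << "]";
}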
bool f_lcc::load_rad_data()
{
    char buf[1024];
    snprintf(buf, 1024, "%s.yml", m_fmap);
    int size;
    FileStorage fs(buf, FileStorage::READ);
    if(!fs.isOpened()){
        cerr << "Failed to open " << buf << endl;
        return false;
    }else{
        FileNode fn = fs["LCCR"];
        if(!fn.empty()){
            fn >> size;
        }
        fn = fs["LCCAMap"];
        if(!fn.empty()){
            fn >> m_amap;
        }
        // NOTE: the snippet ends here in the source; presumably the remaining
        // radial-correction fields are read before the function returns.
        // The closing return is assumed so the snippet compiles.
        return true;
    }
}
bool CvCascadeClassifier::readStages( const FileNode &node )
{
    FileNode rnode = node[CC_STAGES];
    // as above, the stages node must exist and be a sequence
    if ( rnode.empty() || !rnode.isSeq() )
        return false;
    stageClassifiers.reserve(numStages);
    FileNodeIterator it = rnode.begin();
    for( int i = 0; i < min( (int)rnode.size(), numStages ); i++, it++ )
    {
        CvCascadeBoost* tempStage = new CvCascadeBoost;
        if ( !tempStage->read( *it, (CvFeatureEvaluator*)featureEvaluator,
                               *((CvCascadeBoostParams*)stageParams) ) )
        {
            delete tempStage;
            return false;
        }
        stageClassifiers.push_back(tempStage);
    }
    return true;
}
int CV_MLBaseTest::prepare_test_case( int test_case_idx )
{
    int trainSampleCount, respIdx;
    string varTypes;
    clear();

    string dataPath = ts->get_data_path();
    if ( dataPath.empty() )
    {
        ts->printf( cvtest::TS::LOG, "data path is empty" );
        return cvtest::TS::FAIL_INVALID_TEST_DATA;
    }

    string dataName = dataSetNames[test_case_idx],
           filename = dataPath + dataName + ".data";
    if ( data.read_csv( filename.c_str() ) != 0 )
    {
        char msg[100];
        sprintf( msg, "file %s can not be read", filename.c_str() );
        ts->printf( cvtest::TS::LOG, msg );
        return cvtest::TS::FAIL_INVALID_TEST_DATA;
    }

    FileNode dataParamsNode = validationFS.getFirstTopLevelNode()["validation"][modelName][dataName]["data_params"];
    CV_DbgAssert( !dataParamsNode.empty() );

    CV_DbgAssert( !dataParamsNode["LS"].empty() );
    dataParamsNode["LS"] >> trainSampleCount;
    CvTrainTestSplit spl( trainSampleCount );
    data.set_train_test_split( &spl );

    CV_DbgAssert( !dataParamsNode["resp_idx"].empty() );
    dataParamsNode["resp_idx"] >> respIdx;
    data.set_response_idx( respIdx );

    CV_DbgAssert( !dataParamsNode["types"].empty() );
    dataParamsNode["types"] >> varTypes;
    data.set_var_types( varTypes.c_str() );

    return cvtest::TS::OK;
}
void init_nn( NeuralNet_MLP& mlp, const string& data )
{
    assert( mlp.get_layer_count() == 0 );
    try
    {
        FileStorage fs( data, cv::FileStorage::READ | cv::FileStorage::MEMORY );
        FileNode fn = fs[ "mlp" ];
        if ( !fn.empty() )
        {
            mlp.read( *fs, *fn );  // legacy API: operator* yields CvFileStorage* / CvFileNode*
        }
        else
        {
            throw runtime_error( "invalid data in file" );
        }
    }
    catch ( const exception& e )
    {
        throw runtime_error( string( "Failed to load trained neural network state, reason: " ) + e.what() );
    }
}
void SVMSGDImpl::readParams( const FileNode& fn )
{
    String svmsgdTypeStr = (String)fn["svmsgdType"];
    int svmsgdType =
        svmsgdTypeStr == "SGD" ? SGD :
        svmsgdTypeStr == "ASGD" ? ASGD : -1;

    if( svmsgdType < 0 )
        CV_Error( CV_StsParseError, "Missing or invalid SVMSGD type" );

    params.svmsgdType = svmsgdType;

    String marginTypeStr = (String)fn["marginType"];
    int marginType =
        marginTypeStr == "SOFT_MARGIN" ? SOFT_MARGIN :
        marginTypeStr == "HARD_MARGIN" ? HARD_MARGIN : -1;

    if( marginType < 0 )
        CV_Error( CV_StsParseError, "Missing or invalid margin type" );

    params.marginType = marginType;

    CV_Assert ( fn["marginRegularization"].isReal() );
    params.marginRegularization = (float)fn["marginRegularization"];

    CV_Assert ( fn["initialStepSize"].isReal() );
    params.initialStepSize = (float)fn["initialStepSize"];

    CV_Assert ( fn["stepDecreasingPower"].isReal() );
    params.stepDecreasingPower = (float)fn["stepDecreasingPower"];

    FileNode tcnode = fn["term_criteria"];
    CV_Assert(!tcnode.empty());
    params.termCrit.epsilon = (double)tcnode["epsilon"];
    params.termCrit.maxCount = (int)tcnode["iterations"];
    params.termCrit.type = (params.termCrit.epsilon > 0 ? TermCriteria::EPS : 0) +
                           (params.termCrit.maxCount > 0 ? TermCriteria::COUNT : 0);
    CV_Assert ((params.termCrit.type & TermCriteria::COUNT || params.termCrit.type & TermCriteria::EPS));
}
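To make the expected file layout concrete, here is a hedged writer-side sketch that produces the keys readParams parses; the file name and numeric values are placeholders, not the library's defaults:

FileStorage fs("svmsgd_params.yml", FileStorage::WRITE);  // placeholder file name
fs << "svmsgdType" << "ASGD";                 // "SGD" or "ASGD"
fs << "marginType" << "SOFT_MARGIN";          // "SOFT_MARGIN" or "HARD_MARGIN"
fs << "marginRegularization" << 0.00001f;     // must be stored as a real value
fs << "initialStepSize" << 0.05f;
fs << "stepDecreasingPower" << 0.75f;
fs << "term_criteria" << "{"
   << "epsilon" << 1e-7 << "iterations" << 100000
   << "}";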
int CV_MLBaseTest::prepare_test_case( int test_case_idx )
{
    clear();

    string dataPath = ts->get_data_path();
    if ( dataPath.empty() )
    {
        ts->printf( cvtest::TS::LOG, "data path is empty" );
        return cvtest::TS::FAIL_INVALID_TEST_DATA;
    }

    string dataName = dataSetNames[test_case_idx],
           filename = dataPath + dataName + ".data";

    FileNode dataParamsNode = validationFS.getFirstTopLevelNode()["validation"][modelName][dataName]["data_params"];
    CV_DbgAssert( !dataParamsNode.empty() );

    CV_DbgAssert( !dataParamsNode["LS"].empty() );
    int trainSampleCount = (int)dataParamsNode["LS"];

    CV_DbgAssert( !dataParamsNode["resp_idx"].empty() );
    int respIdx = (int)dataParamsNode["resp_idx"];

    CV_DbgAssert( !dataParamsNode["types"].empty() );
    String varTypes = (String)dataParamsNode["types"];

    data = TrainData::loadFromCSV(filename, 0, respIdx, respIdx+1, varTypes);
    if( data.empty() )
    {
        ts->printf( cvtest::TS::LOG, "file %s can not be read\n", filename.c_str() );
        return cvtest::TS::FAIL_INVALID_TEST_DATA;
    }

    data->setTrainTestSplit(trainSampleCount);
    return cvtest::TS::OK;
}
bool CascadeClassifier::Data::read(const FileNode &root)
{
    static const float THRESHOLD_EPS = 1e-5f;

    // load stage params
    String stageTypeStr = (String)root[CC_STAGE_TYPE];
    if( stageTypeStr == CC_BOOST )
        stageType = BOOST;
    else
        return false;

    String featureTypeStr = (String)root[CC_FEATURE_TYPE];
    if( featureTypeStr == CC_HAAR )
        featureType = FeatureEvaluator::HAAR;
    else if( featureTypeStr == CC_LBP )
        featureType = FeatureEvaluator::LBP;
    else if( featureTypeStr == CC_HOG )
        featureType = FeatureEvaluator::HOG;
    else
        return false;

    origWinSize.width = (int)root[CC_WIDTH];
    origWinSize.height = (int)root[CC_HEIGHT];
    CV_Assert( origWinSize.height > 0 && origWinSize.width > 0 );

    isStumpBased = (int)(root[CC_STAGE_PARAMS][CC_MAX_DEPTH]) == 1 ? true : false;

    // load feature params
    FileNode fn = root[CC_FEATURE_PARAMS];
    if( fn.empty() )
        return false;
    ncategories = fn[CC_MAX_CAT_COUNT];
    int subsetSize = (ncategories + 31)/32,
        nodeStep = 3 + ( ncategories > 0 ? subsetSize : 1 );

    // load stages
    fn = root[CC_STAGES];
    if( fn.empty() )
        return false;
    stages.reserve(fn.size());
    classifiers.clear();
    nodes.clear();

    FileNodeIterator it = fn.begin(), it_end = fn.end();
    for( int si = 0; it != it_end; si++, ++it )
    {
        FileNode fns = *it;
        Stage stage;
        stage.threshold = (float)fns[CC_STAGE_THRESHOLD] - THRESHOLD_EPS;
        fns = fns[CC_WEAK_CLASSIFIERS];
        if( fns.empty() )
            return false;
        stage.ntrees = (int)fns.size();
        stage.first = (int)classifiers.size();
        stages.push_back(stage);
        classifiers.reserve(stages[si].first + stages[si].ntrees);

        FileNodeIterator it1 = fns.begin(), it1_end = fns.end();
        for( ; it1 != it1_end; ++it1 ) // weak trees
        {
            FileNode fnw = *it1;
            FileNode internalNodes = fnw[CC_INTERNAL_NODES];
            FileNode leafValues = fnw[CC_LEAF_VALUES];
            if( internalNodes.empty() || leafValues.empty() )
                return false;

            DTree tree;
            tree.nodeCount = (int)internalNodes.size()/nodeStep;
            classifiers.push_back(tree);

            nodes.reserve(nodes.size() + tree.nodeCount);
            leaves.reserve(leaves.size() + leafValues.size());
            if( subsetSize > 0 )
                subsets.reserve(subsets.size() + tree.nodeCount*subsetSize);

            FileNodeIterator internalNodesIter = internalNodes.begin(),
                             internalNodesEnd = internalNodes.end();
            for( ; internalNodesIter != internalNodesEnd; ) // nodes
            {
                DTreeNode node;
                node.left = (int)*internalNodesIter; ++internalNodesIter;
                node.right = (int)*internalNodesIter; ++internalNodesIter;
                node.featureIdx = (int)*internalNodesIter; ++internalNodesIter;
                if( subsetSize > 0 )
                {
                    for( int j = 0; j < subsetSize; j++, ++internalNodesIter )
                        subsets.push_back((int)*internalNodesIter);
                    node.threshold = 0.f;
                }
                else
                {
                    node.threshold = (float)*internalNodesIter; ++internalNodesIter;
                }
                nodes.push_back(node);
            }

            internalNodesIter = leafValues.begin(), internalNodesEnd = leafValues.end();
            for( ; internalNodesIter != internalNodesEnd; ++internalNodesIter ) // leaves
                leaves.push_back((float)*internalNodesIter);
        }
    }
    return true;
}
static void read(const FileNode& node, MyData& x, const MyData& default_value = MyData())
{
    if( node.empty() )
        x = default_value;
    else
        x.read(node);
}
static void read(const FileNode& node, TestHashEntry& x, const TestHashEntry& default_value = TestHashEntry())
{
    if( node.empty() )
        x = default_value;
    else
        x.read(node);
}
int main(int argc, char** argv)
{
    // vectors of feature points computed in part 3
    vector<Point> points1;
    vector<Point> points2;

    // load the first .yml file; it should have the same name as the image file + "_Points.yml"
    string filename = string(argv[1]);
    filename = filename.substr(0, filename.find(".pgm"));
    string pointfilename = filename + "_Points.yml";

    FileStorage fs(pointfilename, FileStorage::READ);
    FileNode ptFileNode;
    int num1 = 0;
    while(true)
    {
        string pointName = "point";
        Point tempPoint;
        stringstream ss;
        ss << num1;
        string temp;
        ss >> temp;
        pointName += temp;
        ptFileNode = fs[pointName];
        if(ptFileNode.empty())
        {
            break; // an empty node means we've reached the end of the list
        }
        ptFileNode >> tempPoint;
        points1.push_back(tempPoint);
        num1++;
    }

    // load the second file
    string filename2 = string(argv[2]);
    filename2 = filename2.substr(0, filename2.find(".pgm"));
    string pointfilename2 = filename2 + "_Points.yml";

    FileStorage fs2(pointfilename2, FileStorage::READ);
    FileNode ptFileNode2;
    int num2 = 0;
    while(true)
    {
        string pointName = "point";
        Point tempPoint;
        stringstream ss;
        ss << num2;
        string temp;
        ss >> temp;
        pointName += temp;
        ptFileNode2 = fs2[pointName];
        if(ptFileNode2.empty())
        {
            break;
        }
        ptFileNode2 >> tempPoint;
        points2.push_back(tempPoint);
        num2++;
    }

    // read the image names from the command line
    Mat image = imread(argv[1], CV_LOAD_IMAGE_GRAYSCALE);
    Mat image2 = imread(argv[2], CV_LOAD_IMAGE_GRAYSCALE);
    Mat imageColor = imread(argv[2], 1);

    int sum;
    int difference;
    Scalar im1;
    Scalar im2;
    vector<feature> potentialMatches;

    // iterate through all features in the reference frame
    for(int j = 0; j < (int)points2.size(); j++)
    {
        Point currPoint2 = points2[j];
        // iterate through all features in the other image
        for(int k = 0; k < (int)points1.size(); k++)
        {
            Point currPoint1 = points1[k];
            sum = 0;
            // check a window x window sized square of pixels to compute the SSD;
            // do this for every feature of image 1 with respect to image 2 (the reference)
            for(int r = -windHalf; r <= windHalf; r++)
            {
                for(int c = -windHalf; c <= windHalf; c++)
                {
                    Point addPoint = Point(r, c);
                    Point squarePoint2 = currPoint2 + addPoint;
                    Point squarePoint1 = currPoint1 + addPoint;
                    // make sure the square doesn't go out of bounds
                    if(squarePoint2.x < 0 || squarePoint1.x < 0 ||
                       squarePoint2.x >= image.cols || squarePoint1.x >= image.cols)
                    {
                        continue;
                    }
                    if(squarePoint2.y < 0 || squarePoint1.y < 0 ||
                       squarePoint2.y >= image.rows || squarePoint1.y >= image.rows)
                    {
                        continue;
                    }
                    im2 = image2.at<unsigned char>(squarePoint2);
                    im1 = image.at<unsigned char>(squarePoint1);
                    difference = im1.val[0] - im2.val[0];
                    sum += difference * difference;
                }
            }
            feature tempF;
            tempF.ssd = sum;
            tempF.p = currPoint1;
            potentialMatches.push_back(tempF);
        }
        // sort matches by SSD value from low to high
        sort(potentialMatches.begin(), potentialMatches.end());
        // if the best/second-best ratio is below the threshold, it is a significant
        // match, so draw a line between the points
        if((double)potentialMatches[0].ssd / potentialMatches[1].ssd < threshold_val)
        {
            line(imageColor, currPoint2, potentialMatches[0].p, Scalar(255, 0, 0));
        }
        potentialMatches.clear();
    }

    for(int i = 0; i < (int)points2.size(); i++)
    {
        circle(imageColor, points2[i], 3, Scalar(0, 0, 255));
    }
    imshow("Matched", imageColor);

    // save the result image to file
    string outputName = argv[2];
    outputName = outputName.substr(0, outputName.find(".pgm"));
    outputName += "_matched.pgm";
    imwrite(outputName, imageColor);

    waitKey(0);
    return 0;
}
void HistogramsIO::read(const FileNode & node, Ptr<Histogram> & x)
{
    if (!node.empty())
        x->read(node);
}
bool fill(const FileNode &root)
{
    // cascade properties
    static const char *const SC_STAGE_TYPE   = "stageType";
    static const char *const SC_BOOST        = "BOOST";
    static const char *const SC_FEATURE_TYPE = "featureType";
    static const char *const SC_ICF          = "ICF";
    static const char *const SC_ORIG_W       = "width";
    static const char *const SC_ORIG_H       = "height";
    static const char *const SC_OCTAVES      = "octaves";
    static const char *const SC_TREES        = "trees";
    static const char *const SC_FEATURES     = "features";
    static const char *const SC_INTERNAL     = "internalNodes";
    static const char *const SC_LEAF         = "leafValues";
    static const char *const SC_SHRINKAGE    = "shrinkage";
    static const char *const FEATURE_FORMAT  = "featureFormat";

    // only AdaBoost is supported
    std::string stageTypeStr = (string)root[SC_STAGE_TYPE];
    CV_Assert(stageTypeStr == SC_BOOST);

    std::string fformat = (string)root[FEATURE_FORMAT];
    bool useBoxes = (fformat == "BOX");

    // only HOG-like integral channel features are supported
    string featureTypeStr = (string)root[SC_FEATURE_TYPE];
    CV_Assert(featureTypeStr == SC_ICF);

    origObjWidth = (int)root[SC_ORIG_W];
    origObjHeight = (int)root[SC_ORIG_H];
    shrinkage = (int)root[SC_SHRINKAGE];

    FileNode fn = root[SC_OCTAVES];
    if (fn.empty())
        return false;

    // for each octave
    FileNodeIterator it = fn.begin(), it_end = fn.end();
    for (int octIndex = 0; it != it_end; ++it, ++octIndex)
    {
        FileNode fns = *it;
        Octave octave(octIndex, cv::Size(origObjWidth, origObjHeight), fns);
        CV_Assert(octave.weaks > 0);
        octaves.push_back(octave);

        FileNode ffs = fns[SC_FEATURES];
        if (ffs.empty())
            return false;

        fns = fns[SC_TREES];
        if (fns.empty()) // check the trees node itself, not the outer octaves node
            return false;

        FileNodeIterator st = fns.begin(), st_end = fns.end();
        for (; st != st_end; ++st)
        {
            weaks.push_back(Weak(*st));

            fns = (*st)[SC_INTERNAL];
            FileNodeIterator inIt = fns.begin(), inIt_end = fns.end();
            for (; inIt != inIt_end;)
                nodes.push_back(Node(features.size(), inIt)); // Node's constructor advances inIt

            fns = (*st)[SC_LEAF];
            inIt = fns.begin(), inIt_end = fns.end();
            for (; inIt != inIt_end; ++inIt)
                leaves.push_back((float)(*inIt));
        }

        st = ffs.begin(), st_end = ffs.end();
        for (; st != st_end; ++st)
            features.push_back(Feature(*st, useBoxes));
    }
    return true;
}
static void read(const FileNode& node, Cluster& x, const Cluster& default_value = Cluster())
{
    if (node.empty())
        x = default_value;
    else
        x.read(node);
}
static Fields* parseCascade(const FileNode &root, const float mins, const float maxs,
                            const int totals, const int method)
{
    static const char *const SC_STAGE_TYPE     = "stageType";
    static const char *const SC_BOOST          = "BOOST";
    static const char *const SC_FEATURE_TYPE   = "featureType";
    static const char *const SC_ICF            = "ICF";
    static const char *const SC_ORIG_W         = "width";
    static const char *const SC_ORIG_H         = "height";
    static const char *const SC_FEATURE_FORMAT = "featureFormat";
    static const char *const SC_SHRINKAGE      = "shrinkage";
    static const char *const SC_OCTAVES        = "octaves";
    static const char *const SC_OCT_SCALE      = "scale";
    static const char *const SC_OCT_WEAKS      = "weaks";
    static const char *const SC_TREES          = "trees";
    static const char *const SC_WEAK_THRESHOLD = "treeThreshold";
    static const char *const SC_FEATURES       = "features";
    static const char *const SC_INTERNAL       = "internalNodes";
    static const char *const SC_LEAF           = "leafValues";
    static const char *const SC_F_CHANNEL      = "channel";
    static const char *const SC_F_RECT         = "rect";

    // only AdaBoost is supported
    std::string stageTypeStr = (std::string)root[SC_STAGE_TYPE];
    CV_Assert(stageTypeStr == SC_BOOST);

    // only HOG-like integral channel features are supported
    std::string featureTypeStr = (std::string)root[SC_FEATURE_TYPE];
    CV_Assert(featureTypeStr == SC_ICF);

    int origWidth  = (int)root[SC_ORIG_W];
    int origHeight = (int)root[SC_ORIG_H];

    std::string fformat = (std::string)root[SC_FEATURE_FORMAT];
    bool useBoxes = (fformat == "BOX");
    ushort shrinkage = cv::saturate_cast<ushort>((int)root[SC_SHRINKAGE]);

    FileNode fn = root[SC_OCTAVES];
    if (fn.empty())
        return 0;

    std::vector<device::Octave> voctaves;
    std::vector<float> vstages;
    std::vector<device::Node> vnodes;
    std::vector<float> vleaves;

    FileNodeIterator it = fn.begin(), it_end = fn.end();
    for (ushort octIndex = 0; it != it_end; ++it, ++octIndex)
    {
        FileNode fns = *it;
        float scale = powf(2.f, saturate_cast<float>((int)fns[SC_OCT_SCALE]));
        bool isUPOctave = scale >= 1;

        ushort nweaks = saturate_cast<ushort>((int)fns[SC_OCT_WEAKS]);

        ushort2 size;
        size.x = cvRound(origWidth * scale);
        size.y = cvRound(origHeight * scale);

        device::Octave octave(octIndex, nweaks, shrinkage, size, scale);
        CV_Assert(octave.stages > 0);
        voctaves.push_back(octave);

        FileNode ffs = fns[SC_FEATURES];
        if (ffs.empty())
            return 0;

        std::vector<cv::Rect> feature_rects;
        std::vector<int> feature_channels;

        FileNodeIterator ftrs = ffs.begin(), ftrs_end = ffs.end();
        int feature_offset = 0;
        for (; ftrs != ftrs_end; ++ftrs, ++feature_offset)
        {
            cv::FileNode ftn = (*ftrs)[SC_F_RECT];
            cv::FileNodeIterator r_it = ftn.begin();
            int x = (int)*(r_it++);
            int y = (int)*(r_it++);
            int w = (int)*(r_it++);
            int h = (int)*(r_it++);

            if (useBoxes)
            {
                if (isUPOctave)
                {
                    w -= x;
                    h -= y;
                }
            }
            else
            {
                if (!isUPOctave)
                {
                    w += x;
                    h += y;
                }
            }
            feature_rects.push_back(cv::Rect(x, y, w, h));
            feature_channels.push_back((int)(*ftrs)[SC_F_CHANNEL]);
        }

        fns = fns[SC_TREES];
        if (fns.empty()) // check the trees node itself; return a null pointer, not false
            return 0;

        // for each stage (~ decision tree with H = 2)
        FileNodeIterator st = fns.begin(), st_end = fns.end();
        for (; st != st_end; ++st)
        {
            FileNode octfn = *st;
            float threshold = (float)octfn[SC_WEAK_THRESHOLD];
            vstages.push_back(threshold);

            FileNode intfns = octfn[SC_INTERNAL];
            FileNodeIterator inIt = intfns.begin(), inIt_end = intfns.end();
            for (; inIt != inIt_end;)
            {
                inIt += 2; // skip the left/right child indices
                int featureIdx = (int)(*(inIt++));

                float orig_threshold = (float)(*(inIt++));
                unsigned int th = saturate_cast<unsigned int>((int)orig_threshold);
                cv::Rect& r = feature_rects[featureIdx];
                uchar4 rect;
                rect.x = saturate_cast<uchar>(r.x);
                rect.y = saturate_cast<uchar>(r.y);
                rect.z = saturate_cast<uchar>(r.width);
                rect.w = saturate_cast<uchar>(r.height);

                unsigned int channel = saturate_cast<unsigned int>(feature_channels[featureIdx]);
                vnodes.push_back(device::Node(rect, channel, th));
            }

            intfns = octfn[SC_LEAF];
            inIt = intfns.begin(), inIt_end = intfns.end();
            for (; inIt != inIt_end; ++inIt)
            {
                vleaves.push_back((float)(*inIt));
            }
        }
    }

    cv::Mat hoctaves(1, (int)(voctaves.size() * sizeof(device::Octave)), CV_8UC1, (uchar*)&(voctaves[0]));
    CV_Assert(!hoctaves.empty());

    cv::Mat hstages(cv::Mat(vstages).reshape(1,1));
    CV_Assert(!hstages.empty());

    cv::Mat hnodes(1, (int)(vnodes.size() * sizeof(device::Node)), CV_8UC1, (uchar*)&(vnodes[0]));
    CV_Assert(!hnodes.empty());

    cv::Mat hleaves(cv::Mat(vleaves).reshape(1,1));
    CV_Assert(!hleaves.empty());

    Fields* fields = new Fields(mins, maxs, totals, origWidth, origHeight, shrinkage, 0,
                                hoctaves, hstages, hnodes, hleaves, method);
    fields->voctaves = voctaves;
    fields->createLevels(DEFAULT_FRAME_HEIGHT, DEFAULT_FRAME_WIDTH);

    return fields;
}
void read_params( const FileNode& fn )
{
    String activ_func_name = (String)fn["activation_function"];
    if( !activ_func_name.empty() )
    {
        activ_func = activ_func_name == "SIGMOID_SYM" ? SIGMOID_SYM :
                     activ_func_name == "IDENTITY" ? IDENTITY :
                     activ_func_name == "GAUSSIAN" ? GAUSSIAN : -1;
        CV_Assert( activ_func >= 0 );
    }
    else
        activ_func = (int)fn["activation_function_id"];

    f_param1 = (double)fn["f_param1"];
    f_param2 = (double)fn["f_param2"];

    setActivationFunction( activ_func, f_param1, f_param2 );

    min_val = (double)fn["min_val"];
    max_val = (double)fn["max_val"];
    min_val1 = (double)fn["min_val1"];
    max_val1 = (double)fn["max_val1"];

    FileNode tpn = fn["training_params"];
    params = AnnParams();

    if( !tpn.empty() )
    {
        String tmethod_name = (String)tpn["train_method"];

        if( tmethod_name == "BACKPROP" )
        {
            params.trainMethod = ANN_MLP::BACKPROP;
            params.bpDWScale = (double)tpn["dw_scale"];
            params.bpMomentScale = (double)tpn["moment_scale"];
        }
        else if( tmethod_name == "RPROP" )
        {
            params.trainMethod = ANN_MLP::RPROP;
            params.rpDW0 = (double)tpn["dw0"];
            params.rpDWPlus = (double)tpn["dw_plus"];
            params.rpDWMinus = (double)tpn["dw_minus"];
            params.rpDWMin = (double)tpn["dw_min"];
            params.rpDWMax = (double)tpn["dw_max"];
        }
        else
            CV_Error(CV_StsParseError, "Unknown training method (should be BACKPROP or RPROP)");

        FileNode tcn = tpn["term_criteria"];
        if( !tcn.empty() )
        {
            FileNode tcn_e = tcn["epsilon"];
            FileNode tcn_i = tcn["iterations"];
            params.termCrit.type = 0;
            if( !tcn_e.empty() )
            {
                params.termCrit.type |= TermCriteria::EPS;
                params.termCrit.epsilon = (double)tcn_e;
            }
            if( !tcn_i.empty() )
            {
                params.termCrit.type |= TermCriteria::COUNT;
                params.termCrit.maxCount = (int)tcn_i;
            }
        }
    }
}