void ImageMolecule::deserialize(const cv::FileNode& fn)
{
    FileNode atoms = fn["atoms"];
    CV_Assert(atoms.type() == FileNode::SEQ);
    std::map<int, Ptr<ImageAtom> > a_map;
    for (size_t i = 0; i < atoms.size(); i++)
    {
        Ptr<ImageAtom> atom(new ImageAtom);
        atom->deserialize(atoms[i]);
        a_map[atom->uid()] = atom;
        // we will insert from pairs...
        insertAtom(atom);
    }
    FileNode pairs = fn["pairs"];
    CV_Assert(pairs.type() == FileNode::SEQ);
    vector<AtomPair> pairs_temp;
    pairs_temp.resize(pairs.size());
    for (size_t i = 0; i < pairs.size(); i++)
    {
        pairs_temp[i].deserialize(pairs[i]);
        // re-link each pair to the shared atoms through their uids
        pairs_temp[i].setAtom1(a_map[pairs_temp[i].atom1()->uid()]);
        pairs_temp[i].setAtom2(a_map[pairs_temp[i].atom2()->uid()]);
    }
    insertPairs(pairs_temp);
}
void CvCascadeBoostTree::read( const FileNode &node, CvBoost* _ensemble, CvDTreeTrainData* _data )
{
    int maxCatCount = ((CvCascadeBoostTrainData*)_data)->featureEvaluator->getMaxCatCount();
    int subsetN = (maxCatCount + 31)/32;
    // each node record: left, right, featureIdx, then either subsetN ints (categorical split)
    // or one float threshold (ordered split)
    int step = 3 + ( maxCatCount > 0 ? subsetN : 1 );

    queue<CvDTreeNode*> internalNodesQueue;
    FileNodeIterator internalNodesIt, leafValuesIt;
    CvDTreeNode* prntNode, *cldNode;

    clear();
    data = _data;
    ensemble = _ensemble;
    pruned_tree_idx = 0;

    // read tree nodes, walking both sequences back to front
    FileNode rnode = node[CC_INTERNAL_NODES];
    internalNodesIt = rnode.end();
    leafValuesIt = node[CC_LEAF_VALUES].end();
    internalNodesIt--; leafValuesIt--;
    for( size_t i = 0; i < rnode.size()/step; i++ )
    {
        prntNode = data->new_node( 0, 0, 0, 0 );
        if ( maxCatCount > 0 )
        {
            prntNode->split = data->new_split_cat( 0, 0 );
            for( int j = subsetN-1; j >= 0; j-- )
            {
                *internalNodesIt >> prntNode->split->subset[j];
                internalNodesIt--;
            }
        }
        else
        {
            // ...
void ReadRightRects(vector<ImageRecognition::SlidingRect> &rightRects,
                    const string &xml_filename, RecognitionStatistics &stat)
{
    using namespace Utils;
    FileStorage file_storage(xml_filename, FileStorage::READ);
    FileNode images = file_storage["images"];
    rightRects.reserve(images.size());
    for (FileNodeIterator it = images.begin(); it != images.end(); ++it)
    {
        // strip the extension from the stored file name
        string part_filename = string(*it);
        size_t dot_pos = part_filename.find_first_of('.');
        if (dot_pos != string::npos)
            part_filename = part_filename.substr(0, dot_pos);
        // split the base name on '_'; the last four tokens encode x, y, width, height
        stringstream ss(part_filename);
        vector<string> parts;
        string part;
        while (getline(ss, part, '_'))
            parts.push_back(part);
        rightRects.push_back(ImageRecognition::SlidingRect());
        int last = (int)parts.size() - 1;
        rightRects.back().rect.x = str2int(parts[last - 3]);
        rightRects.back().rect.y = str2int(parts[last - 2]);
        rightRects.back().rect.width = str2int(parts[last - 1]);
        rightRects.back().rect.height = str2int(parts[last]);
    }
}
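// For illustration, a self-contained sketch of the same token convention used
// above (the filename is hypothetical; only the standard library and cv::Rect
// are assumed): "car_0_34_64_128.png" splits into {"car","0","34","64","128"},
// and the last four '_'-separated tokens yield Rect(0, 34, 64, 128).
#include <opencv2/core.hpp>
#include <cstdlib>
#include <sstream>
#include <string>
#include <vector>

static cv::Rect rectFromName(const std::string &filename)
{
    std::string base = filename.substr(0, filename.find_first_of('.'));
    std::stringstream ss(base);
    std::vector<std::string> parts;
    std::string part;
    while (std::getline(ss, part, '_'))
        parts.push_back(part);
    size_t last = parts.size() - 1;
    return cv::Rect(atoi(parts[last - 3].c_str()), atoi(parts[last - 2].c_str()),
                    atoi(parts[last - 1].c_str()), atoi(parts[last].c_str()));
}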
void loadHist(mH2& hist)
{
    FileStorage fs("test123.xml", FileStorage::READ);
    FileNode n = fs["ModelHistograms"];
    // Loop through classes
    for (int i = 0; i < (int)n.size(); i++)
    {
        stringstream ss;
        ss << "Class_" << i;
        FileNode n1 = n[ss.str()];
        // Loop through each class's models
        for (int j = 0; j < (int)n1.size(); j++)
        {
            stringstream ss1;
            ss1 << "Model_" << j;
            FileNode n2 = n1[ss1.str()];
            // Read the stored Mat into the histogram table
            for (FileNodeIterator it = n2.begin(); it != n2.end(); ++it)
                (*it) >> hist[i][j];
        }
    }
    fs.release();
}
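// A possible writer producing the layout loadHist() expects -- a sketch only,
// assuming mH2 maps class index -> model index -> cv::Mat (the type is not
// shown in this source); the name saveHist is illustrative.
void saveHist(mH2& hist)
{
    cv::FileStorage fs("test123.xml", cv::FileStorage::WRITE);
    fs << "ModelHistograms" << "{";
    for (int i = 0; i < (int)hist.size(); i++)
    {
        std::stringstream cls;
        cls << "Class_" << i;
        fs << cls.str() << "{";
        for (int j = 0; j < (int)hist[i].size(); j++)
        {
            std::stringstream mdl;
            mdl << "Model_" << j;
            // each model node holds a one-element sequence, matching the
            // reader's FileNodeIterator loop
            fs << mdl.str() << "[" << hist[i][j] << "]";
        }
        fs << "}";
    }
    fs << "}";
    fs.release();
}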
void Expression::load(std::string filename)
{
    FileStorage fs(ofToDataPath(filename), FileStorage::READ);
    description = (std::string) fs["description"];
    FileNode samplesNode = fs["samples"];
    int n = (int)samplesNode.size();
    samples.resize(n);
    for (int i = 0; i < n; i++)
        samplesNode[i] >> samples[i];
}
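// A matching writer is not shown in this source; a minimal sketch, assuming
// samples is a std::vector<cv::Mat> and that openFrameworks' ofToDataPath
// resolves the data directory as in load():
void Expression::save(std::string filename)
{
    cv::FileStorage fs(ofToDataPath(filename), cv::FileStorage::WRITE);
    fs << "description" << description;
    fs << "samples" << "[";
    for (int i = 0; i < (int)samples.size(); i++)
        fs << samples[i];   // unnamed Mat entries inside the sequence
    fs << "]";
}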
void NMPTUtils::readMatBinary(const FileNode &tm, Mat &mat)
{
    int rows = (int)tm["rows"], cols = (int)tm["cols"], type = (int)tm["type"];
    mat.create(rows, cols, type);
    if (rows > 0 && cols > 0)
    {
        // the pixel data is stored as a sequence of ASCII-encoded chunks
        vector<string> vs;
        FileNode tl = tm["data"];
        CV_Assert(tl.type() == FileNode::SEQ);
        vs.resize(tl.size());
        for (size_t i = 0; i < tl.size(); i++)
            tl[i] >> vs[i];
        // concatenate the chunks and decode them back into the matrix buffer
        string s;
        joinString(vs, s);
        asciiToBinary(s, mat.data, mat.rows*mat.step);
    }
}
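// The inverse operation, for reference -- a sketch only: binaryToAscii and
// splitString are assumed counterparts of the asciiToBinary/joinString helpers
// used above (they are not shown in this source), and the function name is
// illustrative.
void NMPTUtils::writeMatBinary(FileStorage &fs, const string &name, const Mat &mat)
{
    fs << name << "{" << "rows" << mat.rows << "cols" << mat.cols
       << "type" << mat.type();
    string s;
    binaryToAscii(mat.data, mat.rows*mat.step, s);   // assumed helper
    vector<string> vs;
    splitString(s, vs);                              // assumed helper
    fs << "data" << "[";
    for (size_t i = 0; i < vs.size(); i++)
        fs << vs[i];
    fs << "]" << "}";
}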
bool HOGEvaluator::read( const FileNode& node )
{
    features->resize(node.size());
    featuresPtr = &(*features)[0];
    FileNodeIterator it = node.begin(), it_end = node.end();
    for (int i = 0; it != it_end; ++it, i++)
    {
        if (!featuresPtr[i].read(*it))
            return false;
    }
    return true;
}
bool CvCascadeClassifier::readStages( const FileNode &node )
{
    FileNode rnode = node[CC_STAGES];
    // bail out if the stages node is missing or malformed
    if (rnode.empty() || !rnode.isSeq())
        return false;
    stageClassifiers.reserve(numStages);
    FileNodeIterator it = rnode.begin();
    for (int i = 0; i < min((int)rnode.size(), numStages); i++, it++)
    {
        Ptr<CvCascadeBoost> tempStage = makePtr<CvCascadeBoost>();
        if (!tempStage->read(*it, featureEvaluator, *stageParams))
            return false;
        stageClassifiers.push_back(tempStage);
    }
    return true;
}
virtual int readRunParams( FileStorage& fs )
{
    int code = CV_StereoMatchingTest::readRunParams(fs);
    FileNode fn = fs.getFirstTopLevelNode();
    assert(fn.isSeq());
    // the node is a flat sequence of (caseName, datasetName, ndisp, iterCount) quadruples
    for (int i = 0; i < (int)fn.size(); i += 4)
    {
        string caseName = fn[i], datasetName = fn[i+1];
        RunParams params;
        string ndisp = fn[i+2];
        params.ndisp = atoi(ndisp.c_str());
        string iterCount = fn[i+3];
        params.iterCount = atoi(iterCount.c_str());
        caseNames.push_back(caseName);
        caseDatasets.push_back(datasetName);
        caseRunParams.push_back(params);
    }
    return code;
}
virtual int readRunParams( FileStorage& fs )
{
    int code = CV_StereoMatchingTest::readRunParams(fs);
    FileNode fn = fs.getFirstTopLevelNode();
    assert(fn.isSeq());
    // flat sequence of (caseName, datasetName, ndisp, winSize, fullDP) quintuples
    for (int i = 0; i < (int)fn.size(); i += 5)
    {
        string caseName = fn[i], datasetName = fn[i+1];
        RunParams params;
        string ndisp = fn[i+2];
        params.ndisp = atoi(ndisp.c_str());
        string winSize = fn[i+3];
        params.winSize = atoi(winSize.c_str());
        string fullDP = fn[i+4];
        params.fullDP = atoi(fullDP.c_str()) != 0;
        caseNames.push_back(caseName);
        caseDatasets.push_back(datasetName);
        caseRunParams.push_back(params);
    }
    return code;
}
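// For illustration, both readers above consume the first top-level node as a
// flat sequence read in fixed strides. A hypothetical YAML file for the
// 5-field variant (the node name is arbitrary, since getFirstTopLevelNode()
// ignores it) could look like:
//
//   %YAML:1.0
//   run_params:
//     - "case1"     # caseName
//     - "teddy"     # datasetName
//     - "64"        # ndisp
//     - "11"        # winSize
//     - "1"         # fullDP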
void read(const FileNode& fn)
{
    clear();
    read_params(fn["training_params"]);
    fn["weights"] >> weights;
    fn["means"] >> means;
    // "covs" is a sequence holding one covariance matrix per mixture component
    FileNode cfn = fn["covs"];
    FileNodeIterator cfn_it = cfn.begin();
    int i, n = (int)cfn.size();
    covs.resize(n);
    for (i = 0; i < n; i++, ++cfn_it)
        (*cfn_it) >> covs[i];
    decomposeCovs();
    computeLogWeightDivDet();
}
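// A matching writer -- a sketch only, assuming weights/means are cv::Mat,
// covs is std::vector<cv::Mat>, and that a write_params counterpart of
// read_params exists (an assumption, not shown in this source):
void write(FileStorage& fs) const
{
    fs << "training_params" << "{";
    write_params(fs);   // assumed counterpart of read_params
    fs << "}";
    fs << "weights" << weights << "means" << means;
    fs << "covs" << "[";
    for (size_t i = 0; i < covs.size(); i++)
        fs << covs[i];
    fs << "]";
}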
bool CvCascadeClassifier::readStages( const FileNode &node )
{
    FileNode rnode = node[CC_STAGES];
    // bail out if the stages node is missing or malformed
    if (rnode.empty() || !rnode.isSeq())
        return false;
    stageClassifiers.reserve(numStages);
    FileNodeIterator it = rnode.begin();
    for (int i = 0; i < min((int)rnode.size(), numStages); i++, it++)
    {
        CvCascadeBoost* tempStage = new CvCascadeBoost;
        if (!tempStage->read(*it, (CvFeatureEvaluator*)featureEvaluator,
                             *((CvCascadeBoostParams*)stageParams)))
        {
            delete tempStage;
            return false;
        }
        stageClassifiers.push_back(tempStage);
    }
    return true;
}
int CV_StereoMatchingTest::readDatasetsParams( FileStorage& fs )
{
    if (!fs.isOpened())
    {
        ts->printf( CvTS::LOG, "datasetsParams can not be read " );
        return CvTS::FAIL_INVALID_TEST_DATA;
    }
    datasetsParams.clear();
    FileNode fn = fs.getFirstTopLevelNode();
    assert(fn.isSeq());
    // flat sequence of (name, dispScaleFactor, dispUnknVal) triples
    for (int i = 0; i < (int)fn.size(); i += 3)
    {
        string name = fn[i];
        DatasetParams params;
        string sf = fn[i+1];
        params.dispScaleFactor = atoi(sf.c_str());
        string uv = fn[i+2];
        params.dispUnknVal = atoi(uv.c_str());
        datasetsParams[name] = params;
    }
    return CvTS::OK;
}
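// For illustration, a hypothetical datasets-params file matching the triples
// consumed above (node name arbitrary; values illustrative):
//
//   %YAML:1.0
//   datasets_params:
//     - "tsukuba"   # name
//     - "16"        # dispScaleFactor
//     - "0"         # dispUnknVal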
int CV_MLBaseTest::read_params( CvFileStorage* __fs )
{
    FileStorage _fs(__fs, false);
    if (!_fs.isOpened())
        test_case_count = -1;
    else
    {
        FileNode fn = _fs.getFirstTopLevelNode()["run_params"][modelName];
        test_case_count = (int)fn.size();
        if (test_case_count <= 0)
            test_case_count = -1;
        if (test_case_count > 0)
        {
            dataSetNames.resize(test_case_count);
            FileNodeIterator it = fn.begin();
            for (int i = 0; i < test_case_count; i++, ++it)
                dataSetNames[i] = (string)*it;
        }
    }
    return cvtest::TS::OK;
}
void run(int)
{
    double ranges[][2] = {{0, 256}, {-128, 128}, {0, 65536}, {-32768, 32768},
                          {-1000000, 1000000}, {-10, 10}, {-10, 10}};
    RNG& rng = ts->get_rng();
    RNG rng0;
    test_case_count = 4;
    int progress = 0;
    MemStorage storage(cvCreateMemStorage(0));

    for (int idx = 0; idx < test_case_count; idx++)
    {
        ts->update_context( this, idx, false );
        progress = update_progress( progress, idx, test_case_count, 0 );
        cvClearMemStorage(storage);

        bool mem = (idx % 4) >= 2;
        string filename = tempfile(idx % 2 ? ".yml" : ".xml");
        FileStorage fs(filename, FileStorage::WRITE + (mem ? FileStorage::MEMORY : 0));

        int test_int = (int)cvtest::randInt(rng);
        double test_real = (cvtest::randInt(rng)%2?1:-1)*exp(cvtest::randReal(rng)*18-9);
        string test_string = "vw wv23424rt\"&<>&'@#$@$%$%&%IJUKYILFD@#$@%$&*&() ";

        int depth = cvtest::randInt(rng) % (CV_64F+1);
        int cn = cvtest::randInt(rng) % 4 + 1;
        Mat test_mat(cvtest::randInt(rng)%30+1, cvtest::randInt(rng)%30+1,
                     CV_MAKETYPE(depth, cn));
        rng0.fill(test_mat, CV_RAND_UNI, Scalar::all(ranges[depth][0]),
                  Scalar::all(ranges[depth][1]));
        if (depth >= CV_32F)
        {
            exp(test_mat, test_mat);
            Mat test_mat_scale(test_mat.size(), test_mat.type());
            rng0.fill(test_mat_scale, CV_RAND_UNI, Scalar::all(-1), Scalar::all(1));
            multiply(test_mat, test_mat_scale, test_mat);
        }

        CvSeq* seq = cvCreateSeq(test_mat.type(), (int)sizeof(CvSeq),
                                 (int)test_mat.elemSize(), storage);
        cvSeqPushMulti(seq, test_mat.data, test_mat.cols*test_mat.rows);

        CvGraph* graph = cvCreateGraph( CV_ORIENTED_GRAPH, sizeof(CvGraph),
                                        sizeof(CvGraphVtx), sizeof(CvGraphEdge), storage );
        int edges[][2] = {{0,1},{1,2},{2,0},{0,3},{3,4},{4,1}};
        int i, vcount = 5, ecount = 6;
        for (i = 0; i < vcount; i++)
            cvGraphAddVtx(graph);
        for (i = 0; i < ecount; i++)
        {
            CvGraphEdge* edge;
            cvGraphAddEdge(graph, edges[i][0], edges[i][1], 0, &edge);
            edge->weight = (float)(i+1);
        }

        depth = cvtest::randInt(rng) % (CV_64F+1);
        cn = cvtest::randInt(rng) % 4 + 1;
        int sz[] = {cvtest::randInt(rng)%10+1, cvtest::randInt(rng)%10+1,
                    cvtest::randInt(rng)%10+1};
        MatND test_mat_nd(3, sz, CV_MAKETYPE(depth, cn));
        rng0.fill(test_mat_nd, CV_RAND_UNI, Scalar::all(ranges[depth][0]),
                  Scalar::all(ranges[depth][1]));
        if (depth >= CV_32F)
        {
            exp(test_mat_nd, test_mat_nd);
            MatND test_mat_scale(test_mat_nd.dims, test_mat_nd.size, test_mat_nd.type());
            rng0.fill(test_mat_scale, CV_RAND_UNI, Scalar::all(-1), Scalar::all(1));
            multiply(test_mat_nd, test_mat_scale, test_mat_nd);
        }

        int ssz[] = {cvtest::randInt(rng)%10+1, cvtest::randInt(rng)%10+1,
                     cvtest::randInt(rng)%10+1, cvtest::randInt(rng)%10+1};
        SparseMat test_sparse_mat = cvTsGetRandomSparseMat(4, ssz,
            cvtest::randInt(rng)%(CV_64F+1), cvtest::randInt(rng) % 10000, 0, 100, rng);

        fs << "test_int" << test_int << "test_real" << test_real
           << "test_string" << test_string;
        fs << "test_mat" << test_mat;
        fs << "test_mat_nd" << test_mat_nd;
        fs << "test_sparse_mat" << test_sparse_mat;
        fs << "test_list" << "[" << 0.0000000000001 << 2 << CV_PI << -3435345
           << "2-502 2-029 3egegeg"
           << "{:" << "month" << 12 << "day" << 31 << "year" << 1969 << "}" << "]";
        fs << "test_map" << "{" << "x" << 1 << "y" << 2
           << "width" << 100 << "height" << 200 << "lbp" << "[:";

        const uchar arr[] = {0, 1, 1, 0, 1, 1, 0, 1};
        fs.writeRaw("u", arr, (int)(sizeof(arr)/sizeof(arr[0])));
        fs << "]" << "}";

        cvWriteComment(*fs, "test comment", 0);
        fs.writeObj("test_seq", seq);
        fs.writeObj("test_graph", graph);
        CvGraph* graph2 = (CvGraph*)cvClone(graph);

        string content = fs.releaseAndGetString();

        if (!fs.open(mem ? content : filename,
                     FileStorage::READ + (mem ? FileStorage::MEMORY : 0)))
        {
            ts->printf( cvtest::TS::LOG, "filename %s can not be read\n",
                        !mem ? filename.c_str() : content.c_str());
            ts->set_failed_test_info( cvtest::TS::FAIL_MISSING_TEST_DATA );
            return;
        }

        int real_int = (int)fs["test_int"];
        double real_real = (double)fs["test_real"];
        string real_string = (string)fs["test_string"];
        if (real_int != test_int ||
            fabs(real_real - test_real) > DBL_EPSILON*(fabs(test_real)+1) ||
            real_string != test_string)
        {
            ts->printf( cvtest::TS::LOG, "the read scalars are not correct\n" );
            ts->set_failed_test_info( cvtest::TS::FAIL_INVALID_OUTPUT );
            return;
        }

        CvMat* m = (CvMat*)fs["test_mat"].readObj();
        CvMat _test_mat = test_mat;
        double max_diff = 0;
        CvMat stub1, _test_stub1;
        cvReshape(m, &stub1, 1, 0);
        cvReshape(&_test_mat, &_test_stub1, 1, 0);
        vector<int> pt;
        if (!m || !CV_IS_MAT(m) || m->rows != test_mat.rows || m->cols != test_mat.cols ||
            cvtest::cmpEps( Mat(&stub1), Mat(&_test_stub1), &max_diff, 0, &pt, true) < 0)
        {
            ts->printf( cvtest::TS::LOG,
                        "the read matrix is not correct: (%.20g vs %.20g) at (%d,%d)\n",
                        cvGetReal2D(&stub1, pt[0], pt[1]),
                        cvGetReal2D(&_test_stub1, pt[0], pt[1]), pt[0], pt[1] );
            ts->set_failed_test_info( cvtest::TS::FAIL_INVALID_OUTPUT );
            return;
        }
        if (m && CV_IS_MAT(m))
            cvReleaseMat(&m);

        CvMatND* m_nd = (CvMatND*)fs["test_mat_nd"].readObj();
        CvMatND _test_mat_nd = test_mat_nd;
        if (!m_nd || !CV_IS_MATND(m_nd))
        {
            ts->printf( cvtest::TS::LOG, "the read nd-matrix is not correct\n" );
            ts->set_failed_test_info( cvtest::TS::FAIL_INVALID_OUTPUT );
            return;
        }

        CvMat stub, _test_stub;
        cvGetMat(m_nd, &stub, 0, 1);
        cvGetMat(&_test_mat_nd, &_test_stub, 0, 1);
        cvReshape(&stub, &stub1, 1, 0);
        cvReshape(&_test_stub, &_test_stub1, 1, 0);
        if (!CV_ARE_TYPES_EQ(&stub, &_test_stub) ||
            !CV_ARE_SIZES_EQ(&stub, &_test_stub) ||
            cvtest::cmpEps( Mat(&stub1), Mat(&_test_stub1), &max_diff, 0, &pt, true) < 0)
        {
            ts->printf( cvtest::TS::LOG,
                        "readObj method: the read nd matrix is not correct: (%.20g vs %.20g) at (%d,%d)\n",
                        cvGetReal2D(&stub1, pt[0], pt[1]),
                        cvGetReal2D(&_test_stub1, pt[0], pt[1]), pt[0], pt[1] );
            ts->set_failed_test_info( cvtest::TS::FAIL_INVALID_OUTPUT );
            return;
        }

        MatND mat_nd2;
        fs["test_mat_nd"] >> mat_nd2;
        CvMatND m_nd2 = mat_nd2;
        cvGetMat(&m_nd2, &stub, 0, 1);
        cvReshape(&stub, &stub1, 1, 0);
        if (!CV_ARE_TYPES_EQ(&stub, &_test_stub) ||
            !CV_ARE_SIZES_EQ(&stub, &_test_stub) ||
            cvtest::cmpEps( Mat(&stub1), Mat(&_test_stub1), &max_diff, 0, &pt, true) < 0)
        {
            ts->printf( cvtest::TS::LOG,
                        "C++ method: the read nd matrix is not correct: (%.20g vs %.20g) at (%d,%d)\n",
                        cvGetReal2D(&stub1, pt[0], pt[1]),
                        cvGetReal2D(&_test_stub1, pt[0], pt[1]), pt[0], pt[1] );
            ts->set_failed_test_info( cvtest::TS::FAIL_INVALID_OUTPUT );
            return;
        }
        cvRelease((void**)&m_nd);

        Ptr<CvSparseMat> m_s = (CvSparseMat*)fs["test_sparse_mat"].readObj();
        Ptr<CvSparseMat> _test_sparse_ = (CvSparseMat*)test_sparse_mat;
        Ptr<CvSparseMat> _test_sparse = (CvSparseMat*)cvClone(_test_sparse_);
        SparseMat m_s2;
        fs["test_sparse_mat"] >> m_s2;
        Ptr<CvSparseMat> _m_s2 = (CvSparseMat*)m_s2;
        if (!m_s || !CV_IS_SPARSE_MAT(m_s) ||
            !cvTsCheckSparse(m_s, _test_sparse, 0) ||
            !cvTsCheckSparse(_m_s2, _test_sparse, 0))
        {
            ts->printf( cvtest::TS::LOG, "the read sparse matrix is not correct\n" );
            ts->set_failed_test_info( cvtest::TS::FAIL_INVALID_OUTPUT );
            return;
        }

        FileNode tl = fs["test_list"];
        if (tl.type() != FileNode::SEQ || tl.size() != 6 ||
            fabs((double)tl[0] - 0.0000000000001) >= DBL_EPSILON ||
            (int)tl[1] != 2 ||
            fabs((double)tl[2] - CV_PI) >= DBL_EPSILON ||
            (int)tl[3] != -3435345 ||
            (string)tl[4] != "2-502 2-029 3egegeg" ||
            tl[5].type() != FileNode::MAP || tl[5].size() != 3 ||
            (int)tl[5]["month"] != 12 || (int)tl[5]["day"] != 31 ||
            (int)tl[5]["year"] != 1969)
        {
            ts->printf( cvtest::TS::LOG, "the test list is incorrect\n" );
            ts->set_failed_test_info( cvtest::TS::FAIL_INVALID_OUTPUT );
            return;
        }

        FileNode tm = fs["test_map"];
        FileNode tm_lbp = tm["lbp"];
        int real_x = (int)tm["x"];
        int real_y = (int)tm["y"];
        int real_width = (int)tm["width"];
        int real_height = (int)tm["height"];

        // reassemble the raw lbp bits, exercising every iterator operation
        int real_lbp_val = 0;
        FileNodeIterator it;
        it = tm_lbp.begin();
        real_lbp_val |= (int)*it << 0;
        ++it;
        real_lbp_val |= (int)*it << 1;
        it++;
        real_lbp_val |= (int)*it << 2;
        it += 1;
        real_lbp_val |= (int)*it << 3;
        FileNodeIterator it2(it);
        it2 += 4;
        real_lbp_val |= (int)*it2 << 7;
        --it2;
        real_lbp_val |= (int)*it2 << 6;
        it2--;
        real_lbp_val |= (int)*it2 << 5;
        it2 -= 1;
        real_lbp_val |= (int)*it2 << 4;
        it2 += -1;
        CV_Assert( it == it2 );

        if (tm.type() != FileNode::MAP || tm.size() != 5 ||
            real_x != 1 || real_y != 2 || real_width != 100 || real_height != 200 ||
            tm_lbp.type() != FileNode::SEQ || tm_lbp.size() != 8 ||
            real_lbp_val != 0xb6)
        {
            ts->printf( cvtest::TS::LOG, "the test map is incorrect\n" );
            ts->set_failed_test_info( cvtest::TS::FAIL_INVALID_OUTPUT );
            return;
        }

        CvGraph* graph3 = (CvGraph*)fs["test_graph"].readObj();
        if (graph2->active_count != vcount || graph3->active_count != vcount ||
            graph2->edges->active_count != ecount ||
            graph3->edges->active_count != ecount)
        {
            ts->printf( cvtest::TS::LOG,
                        "the cloned or read graph have wrong number of vertices or edges\n" );
            ts->set_failed_test_info( cvtest::TS::FAIL_INVALID_OUTPUT );
            return;
        }
        for (i = 0; i < ecount; i++)
        {
            CvGraphEdge* edge2 = cvFindGraphEdge(graph2, edges[i][0], edges[i][1]);
            CvGraphEdge* edge3 = cvFindGraphEdge(graph3, edges[i][0], edges[i][1]);
            if (!edge2 || edge2->weight != (float)(i+1) ||
                !edge3 || edge3->weight != (float)(i+1))
            {
                ts->printf( cvtest::TS::LOG,
                            "the cloned or read graph do not have the edge (%d, %d)\n",
                            edges[i][0], edges[i][1] );
                ts->set_failed_test_info( cvtest::TS::FAIL_INVALID_OUTPUT );
                return;
            }
        }

        fs.release();
        if (!mem)
            remove(filename.c_str());
    }
}
bool CascadeClassifier::Data::read(const FileNode &root)
{
    static const float THRESHOLD_EPS = 1e-5f;

    // load stage params
    String stageTypeStr = (String)root[CC_STAGE_TYPE];
    if( stageTypeStr == CC_BOOST )
        stageType = BOOST;
    else
        return false;

    String featureTypeStr = (String)root[CC_FEATURE_TYPE];
    if( featureTypeStr == CC_HAAR )
        featureType = FeatureEvaluator::HAAR;
    else if( featureTypeStr == CC_LBP )
        featureType = FeatureEvaluator::LBP;
    else if( featureTypeStr == CC_HOG )
        featureType = FeatureEvaluator::HOG;
    else
        return false;

    origWinSize.width = (int)root[CC_WIDTH];
    origWinSize.height = (int)root[CC_HEIGHT];
    CV_Assert( origWinSize.height > 0 && origWinSize.width > 0 );

    isStumpBased = (int)(root[CC_STAGE_PARAMS][CC_MAX_DEPTH]) == 1;

    // load feature params
    FileNode fn = root[CC_FEATURE_PARAMS];
    if( fn.empty() )
        return false;
    ncategories = fn[CC_MAX_CAT_COUNT];
    int subsetSize = (ncategories + 31)/32,
        nodeStep = 3 + ( ncategories > 0 ? subsetSize : 1 );

    // load stages
    fn = root[CC_STAGES];
    if( fn.empty() )
        return false;
    stages.reserve(fn.size());
    classifiers.clear();
    nodes.clear();

    FileNodeIterator it = fn.begin(), it_end = fn.end();
    for( int si = 0; it != it_end; si++, ++it )
    {
        FileNode fns = *it;
        Stage stage;
        stage.threshold = (float)fns[CC_STAGE_THRESHOLD] - THRESHOLD_EPS;
        fns = fns[CC_WEAK_CLASSIFIERS];
        if( fns.empty() )
            return false;
        stage.ntrees = (int)fns.size();
        stage.first = (int)classifiers.size();
        stages.push_back(stage);
        classifiers.reserve(stages[si].first + stages[si].ntrees);

        FileNodeIterator it1 = fns.begin(), it1_end = fns.end();
        for( ; it1 != it1_end; ++it1 ) // weak trees
        {
            FileNode fnw = *it1;
            FileNode internalNodes = fnw[CC_INTERNAL_NODES];
            FileNode leafValues = fnw[CC_LEAF_VALUES];
            if( internalNodes.empty() || leafValues.empty() )
                return false;

            DTree tree;
            tree.nodeCount = (int)internalNodes.size()/nodeStep;
            classifiers.push_back(tree);

            nodes.reserve(nodes.size() + tree.nodeCount);
            leaves.reserve(leaves.size() + leafValues.size());
            if( subsetSize > 0 )
                subsets.reserve(subsets.size() + tree.nodeCount*subsetSize);

            FileNodeIterator internalNodesIter = internalNodes.begin(),
                             internalNodesEnd = internalNodes.end();
            for( ; internalNodesIter != internalNodesEnd; ) // nodes
            {
                DTreeNode node;
                node.left = (int)*internalNodesIter; ++internalNodesIter;
                node.right = (int)*internalNodesIter; ++internalNodesIter;
                node.featureIdx = (int)*internalNodesIter; ++internalNodesIter;
                if( subsetSize > 0 )
                {
                    // categorical split: read the bit subset, no threshold
                    for( int j = 0; j < subsetSize; j++, ++internalNodesIter )
                        subsets.push_back((int)*internalNodesIter);
                    node.threshold = 0.f;
                }
                else
                {
                    node.threshold = (float)*internalNodesIter; ++internalNodesIter;
                }
                nodes.push_back(node);
            }

            internalNodesIter = leafValues.begin(), internalNodesEnd = leafValues.end();
            for( ; internalNodesIter != internalNodesEnd; ++internalNodesIter ) // leaves
                leaves.push_back((float)*internalNodesIter);
        }
    }
    return true;
}
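// For orientation, a hypothetical fragment of the cascade file layout the
// reader above walks (key names stand in for the CC_* constants; all values
// are illustrative, not taken from a real model):
//
//   cascade:
//     stageType: BOOST
//     featureType: LBP
//     width: 24
//     height: 24
//     stageParams: { maxDepth: 1 }
//     featureParams: { maxCatCount: 256 }
//     stages:
//       - stageThreshold: -0.75
//         weakClassifiers:
//           - internalNodes: [ 0, -1, 13, ... ]   # left, right, featureIdx, then subset or threshold
//             leafValues: [ -0.9, 0.8 ]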
int main( int argc, const char** argv )
{
    CommandLineParser parser(argc, argv,
        "{ help h usage ? | | show this message }"
        "{ image i | | (required) path to reference image }"
        "{ model m | | (required) path to cascade xml file }"
        "{ data d | | (optional) path to video output folder }"
    );

    // Read in the input arguments
    if (parser.has("help")){
        parser.printMessage();
        printLimits();
        return 0;
    }
    string model(parser.get<string>("model"));
    string output_folder(parser.get<string>("data"));
    string image_ref = (parser.get<string>("image"));
    if (model.empty() || image_ref.empty()){
        parser.printMessage();
        printLimits();
        return -1;
    }

    // Value for timing
    // You can increase this to have a better visualisation during the generation
    int timing = 1;

    // Value for cols of storing elements
    int cols_prefered = 5;

    // Open the XML model
    FileStorage fs;
    bool model_ok = fs.open(model, FileStorage::READ);
    if (!model_ok){
        cerr << "the cascade file '" << model << "' could not be loaded." << endl;
        return -1;
    }

    // Get the required information
    // First decide which feature type we are using
    FileNode cascade = fs["cascade"];
    string feature_type = cascade["featureType"];
    bool haar = false, lbp = false;
    if (feature_type.compare("HAAR") == 0){
        haar = true;
    }
    if (feature_type.compare("LBP") == 0){
        lbp = true;
    }
    if (feature_type.compare("HAAR") != 0 && feature_type.compare("LBP") != 0){
        cerr << "The model is not a HAAR or LBP feature based model!" << endl;
        cerr << "Please select a model that can be visualized by the software." << endl;
        return -1;
    }

    // We make a visualisation mask - which increases the window to make it at least a bit more visible
    int resize_factor = 10;
    int resize_storage_factor = 10;
    Mat reference_image = imread(image_ref, IMREAD_GRAYSCALE );
    if (reference_image.empty()){
        cerr << "the reference image '" << image_ref << "' could not be loaded." << endl;
        return -1;
    }
    Mat visualization;
    resize(reference_image, visualization,
           Size(reference_image.cols * resize_factor, reference_image.rows * resize_factor));

    // First recover for each stage the number of weak features and their index
    // Important since it is NOT sequential when using LBP features
    vector< vector<int> > stage_features;
    FileNode stages = cascade["stages"];
    FileNodeIterator it_stages = stages.begin(), it_stages_end = stages.end();
    int idx = 0;
    for( ; it_stages != it_stages_end; it_stages++, idx++ ){
        vector<int> current_feature_indexes;
        FileNode weak_classifiers = (*it_stages)["weakClassifiers"];
        FileNodeIterator it_weak = weak_classifiers.begin(), it_weak_end = weak_classifiers.end();
        vector<int> values;
        for(int idy = 0; it_weak != it_weak_end; it_weak++, idy++ ){
            // the third entry of internalNodes is the feature index of the stump
            (*it_weak)["internalNodes"] >> values;
            current_feature_indexes.push_back( (int)values[2] );
        }
        stage_features.push_back(current_feature_indexes);
    }

    // If the output option has been chosen then we will store a combined image plane for
    // each stage, containing all weak classifiers for that stage.
    bool draw_planes = false;
    stringstream output_video;
    output_video << output_folder << "model_visualization.avi";
    VideoWriter result_video;
    if( output_folder.compare("") != 0 ){
        draw_planes = true;
        result_video.open(output_video.str(), VideoWriter::fourcc('X','V','I','D'), 15,
                          Size(reference_image.cols * resize_factor,
                               reference_image.rows * resize_factor), false);
    }

    if(haar){
        // Grab the corresponding features dimensions and weights
        FileNode features = cascade["features"];
        vector< vector< rect_data > > feature_data;
        FileNodeIterator it_features = features.begin(), it_features_end = features.end();
        for(int idf = 0; it_features != it_features_end; it_features++, idf++ ){
            vector< rect_data > current_feature_rectangles;
            FileNode rectangles = (*it_features)["rects"];
            int nrects = (int)rectangles.size();
            for(int k = 0; k < nrects; k++){
                rect_data current_data;
                FileNode single_rect = rectangles[k];
                current_data.x = (int)single_rect[0];
                current_data.y = (int)single_rect[1];
                current_data.w = (int)single_rect[2];
                current_data.h = (int)single_rect[3];
                current_data.weight = (float)single_rect[4];
                current_feature_rectangles.push_back(current_data);
            }
            feature_data.push_back(current_feature_rectangles);
        }

        // Loop over each possible feature on its index, visualise on the mask and wait a bit,
        // then continue to the next feature.
        // If visualisations should be stored then do the in between calculations
        Mat image_plane;
        Mat metadata = Mat::zeros(150, 1000, CV_8UC1);
        vector< rect_data > current_rects;
        for(int sid = 0; sid < (int)stage_features.size(); sid ++){
            if(draw_planes){
                int features_nmbr = (int)stage_features[sid].size();
                int cols = cols_prefered;
                int rows = features_nmbr / cols;
                if( (features_nmbr % cols) > 0){
                    rows++;
                }
                image_plane = Mat::zeros(reference_image.rows * resize_storage_factor * rows,
                                         reference_image.cols * resize_storage_factor * cols,
                                         CV_8UC1);
            }
            for(int fid = 0; fid < (int)stage_features[sid].size(); fid++){
                stringstream meta1, meta2;
                meta1 << "Stage " << sid << " / Feature " << fid;
                meta2 << "Rectangles: ";
                Mat temp_window = visualization.clone();
                Mat temp_metadata = metadata.clone();
                int current_feature_index = stage_features[sid][fid];
                current_rects = feature_data[current_feature_index];
                Mat single_feature = reference_image.clone();
                resize(single_feature, single_feature, Size(),
                       resize_storage_factor, resize_storage_factor);
                for(int i = 0; i < (int)current_rects.size(); i++){
                    rect_data local = current_rects[i];
                    if(draw_planes){
                        if(local.weight >= 0){
                            rectangle(single_feature,
                                      Rect(local.x * resize_storage_factor, local.y * resize_storage_factor,
                                           local.w * resize_storage_factor, local.h * resize_storage_factor),
                                      Scalar(0), FILLED);
                        }else{
                            rectangle(single_feature,
                                      Rect(local.x * resize_storage_factor, local.y * resize_storage_factor,
                                           local.w * resize_storage_factor, local.h * resize_storage_factor),
                                      Scalar(255), FILLED);
                        }
                    }
                    Rect part(local.x * resize_factor, local.y * resize_factor,
                              local.w * resize_factor, local.h * resize_factor);
                    meta2 << part << " (w " << local.weight << ") ";
                    if(local.weight >= 0){
                        rectangle(temp_window, part, Scalar(0), FILLED);
                    }else{
                        rectangle(temp_window, part, Scalar(255), FILLED);
                    }
                }
                imshow("features", temp_window);
                putText(temp_window, meta1.str(), Point(15,15), FONT_HERSHEY_SIMPLEX, 0.5, Scalar(255));
                result_video.write(temp_window);
                // Copy the feature image if needed
                if(draw_planes){
                    single_feature.copyTo(image_plane(Rect(0 + (fid%cols_prefered)*single_feature.cols,
                                                           0 + (fid/cols_prefered)*single_feature.rows,
                                                           single_feature.cols,
                                                           single_feature.rows)));
                }
                putText(temp_metadata, meta1.str(), Point(15,15), FONT_HERSHEY_SIMPLEX, 0.5, Scalar(255));
                putText(temp_metadata, meta2.str(), Point(15,40), FONT_HERSHEY_SIMPLEX, 0.5, Scalar(255));
                imshow("metadata", temp_metadata);
                waitKey(timing);
            }
            // Store the stage image if needed
            if(draw_planes){
                stringstream save_location;
                save_location << output_folder << "stage_" << sid << ".png";
                imwrite(save_location.str(), image_plane);
            }
        }
    }

    if(lbp){
        // Grab the corresponding features dimensions and weights
        FileNode features = cascade["features"];
        vector<Rect> feature_data;
        FileNodeIterator it_features = features.begin(), it_features_end = features.end();
        for(int idf = 0; it_features != it_features_end; it_features++, idf++ ){
            FileNode rectangle = (*it_features)["rect"];
            Rect current_feature((int)rectangle[0], (int)rectangle[1],
                                 (int)rectangle[2], (int)rectangle[3]);
            feature_data.push_back(current_feature);
        }

        // Loop over each possible feature on its index, visualise on the mask and wait a bit,
        // then continue to the next feature.
        Mat image_plane;
        Mat metadata = Mat::zeros(150, 1000, CV_8UC1);
        for(int sid = 0; sid < (int)stage_features.size(); sid ++){
            if(draw_planes){
                int features_nmbr = (int)stage_features[sid].size();
                int cols = cols_prefered;
                int rows = features_nmbr / cols;
                if( (features_nmbr % cols) > 0){
                    rows++;
                }
                image_plane = Mat::zeros(reference_image.rows * resize_storage_factor * rows,
                                         reference_image.cols * resize_storage_factor * cols,
                                         CV_8UC1);
            }
            for(int fid = 0; fid < (int)stage_features[sid].size(); fid++){
                stringstream meta1, meta2;
                meta1 << "Stage " << sid << " / Feature " << fid;
                meta2 << "Rectangle: ";
                Mat temp_window = visualization.clone();
                Mat temp_metadata = metadata.clone();
                int current_feature_index = stage_features[sid][fid];
                Rect current_rect = feature_data[current_feature_index];
                Mat single_feature = reference_image.clone();
                resize(single_feature, single_feature, Size(),
                       resize_storage_factor, resize_storage_factor);

                // VISUALISATION
                // The rectangle is the top left one of a 3x3 block LBP constructor
                Rect resized(current_rect.x * resize_factor, current_rect.y * resize_factor,
                             current_rect.width * resize_factor, current_rect.height * resize_factor);
                meta2 << resized;
                // Top left
                rectangle(temp_window, resized, Scalar(255), 1);
                // Top middle
                rectangle(temp_window, Rect(resized.x + resized.width, resized.y, resized.width, resized.height), Scalar(255), 1);
                // Top right
                rectangle(temp_window, Rect(resized.x + 2*resized.width, resized.y, resized.width, resized.height), Scalar(255), 1);
                // Middle left
                rectangle(temp_window, Rect(resized.x, resized.y + resized.height, resized.width, resized.height), Scalar(255), 1);
                // Middle middle
                rectangle(temp_window, Rect(resized.x + resized.width, resized.y + resized.height, resized.width, resized.height), Scalar(255), FILLED);
                // Middle right
                rectangle(temp_window, Rect(resized.x + 2*resized.width, resized.y + resized.height, resized.width, resized.height), Scalar(255), 1);
                // Bottom left
                rectangle(temp_window, Rect(resized.x, resized.y + 2*resized.height, resized.width, resized.height), Scalar(255), 1);
                // Bottom middle
                rectangle(temp_window, Rect(resized.x + resized.width, resized.y + 2*resized.height, resized.width, resized.height), Scalar(255), 1);
                // Bottom right
                rectangle(temp_window, Rect(resized.x + 2*resized.width, resized.y + 2*resized.height, resized.width, resized.height), Scalar(255), 1);

                if(draw_planes){
                    Rect resized_inner(current_rect.x * resize_storage_factor,
                                       current_rect.y * resize_storage_factor,
                                       current_rect.width * resize_storage_factor,
                                       current_rect.height * resize_storage_factor);
                    // Top left
                    rectangle(single_feature, resized_inner, Scalar(255), 1);
                    // Top middle
                    rectangle(single_feature, Rect(resized_inner.x + resized_inner.width, resized_inner.y, resized_inner.width, resized_inner.height), Scalar(255), 1);
                    // Top right
                    rectangle(single_feature, Rect(resized_inner.x + 2*resized_inner.width, resized_inner.y, resized_inner.width, resized_inner.height), Scalar(255), 1);
                    // Middle left
                    rectangle(single_feature, Rect(resized_inner.x, resized_inner.y + resized_inner.height, resized_inner.width, resized_inner.height), Scalar(255), 1);
                    // Middle middle
                    rectangle(single_feature, Rect(resized_inner.x + resized_inner.width, resized_inner.y + resized_inner.height, resized_inner.width, resized_inner.height), Scalar(255), FILLED);
                    // Middle right
                    rectangle(single_feature, Rect(resized_inner.x + 2*resized_inner.width, resized_inner.y + resized_inner.height, resized_inner.width, resized_inner.height), Scalar(255), 1);
                    // Bottom left
                    rectangle(single_feature, Rect(resized_inner.x, resized_inner.y + 2*resized_inner.height, resized_inner.width, resized_inner.height), Scalar(255), 1);
                    // Bottom middle
                    rectangle(single_feature, Rect(resized_inner.x + resized_inner.width, resized_inner.y + 2*resized_inner.height, resized_inner.width, resized_inner.height), Scalar(255), 1);
                    // Bottom right
                    rectangle(single_feature, Rect(resized_inner.x + 2*resized_inner.width, resized_inner.y + 2*resized_inner.height, resized_inner.width, resized_inner.height), Scalar(255), 1);

                    single_feature.copyTo(image_plane(Rect(0 + (fid%cols_prefered)*single_feature.cols,
                                                           0 + (fid/cols_prefered)*single_feature.rows,
                                                           single_feature.cols,
                                                           single_feature.rows)));
                }

                putText(temp_metadata, meta1.str(), Point(15,15), FONT_HERSHEY_SIMPLEX, 0.5, Scalar(255));
                putText(temp_metadata, meta2.str(), Point(15,40), FONT_HERSHEY_SIMPLEX, 0.5, Scalar(255));
                imshow("metadata", temp_metadata);
                imshow("features", temp_window);
                putText(temp_window, meta1.str(), Point(15,15), FONT_HERSHEY_SIMPLEX, 0.5, Scalar(255));
                result_video.write(temp_window);
                waitKey(timing);
            }
            // Store the stage image if needed
            if(draw_planes){
                stringstream save_location;
                save_location << output_folder << "stage_" << sid << ".png";
                imwrite(save_location.str(), image_plane);
            }
        }
    }
    return 0;
}