void DataSetVOC::importPaulData(CStr &_inDir, CStr &vocDir)
{
    string inDir, outImgDir = vocDir + "JPEGImages/", annoDir = vocDir + "Annotations/";
    CmFile::MkDir(outImgDir);
    CmFile::MkDir(annoDir);
    CmFile::MkDir(vocDir + "ImageSets/Main/");
    vecS namesNE;
    FILE *f = fopen(_S(_inDir + "Possitive.txt"), "r");
    CV_Assert(f != NULL);
    char fName[1000];
    int l, x, y, w, h;
    while (fscanf(f, "%s %d %d %d %d %d\n", fName, &l, &x, &y, &w, &h) == 6){
        string nameNE = CmFile::GetNameNE(fName);
        namesNE.push_back(nameNE);
        // Resize so the longer image side becomes 500 pixels
        Mat img = imread(_inDir + fName), imgN;
        double ratio = 500.0 / max(img.cols, img.rows);
        resize(img, imgN, Size(), ratio, ratio);
        imwrite(outImgDir + nameNE + ".jpg", imgN);
        // Write a VOC-style annotation with the rescaled bounding box
        FileStorage fs(annoDir + nameNE + ".yml", FileStorage::WRITE);
        fs << "annotation" << "{" << "object" << "{" << "bndbox" << "{";
        fs << "xmin" << format("'%d'", 1 + cvRound(x*ratio)) << "ymin" << format("'%d'", 1 + cvRound(y*ratio));
        fs << "xmax" << format("'%d'", min(imgN.cols, cvRound((x+w)*ratio))) << "ymax" << format("'%d'", min(imgN.rows, cvRound((y+h)*ratio)));
        fs << "}" << "name" << "Salient" << "}" << "}";
    }
    fclose(f);
    // Random half/half train-test split
    int imgNum = (int)namesNE.size();
    random_shuffle(namesNE.begin(), namesNE.end());
    vecS trainSet(namesNE.begin(), namesNE.begin() + imgNum/2);
    vecS testSet(namesNE.begin() + imgNum/2, namesNE.end());
    CmFile::writeStrList(vocDir + "ImageSets/Main/TrainVal.txt", trainSet);
    CmFile::writeStrList(vocDir + "ImageSets/Main/Test.txt", testSet);
}
bool DataSetVOC::importSaliencyBench(CStr &salDir, CStr &vocDir)
{
    string inDir, outImgDir = vocDir + "JPEGImages/", annoDir = vocDir + "Annotations/";
    CmFile::MkDir(outImgDir);
    CmFile::MkDir(annoDir);
    CmFile::MkDir(vocDir + "ImageSets/Main/");
    vecS namesNE;
    int imgNum = CmFile::GetNamesNE(salDir + "Src/*.jpg", namesNE, inDir);
    random_shuffle(namesNE.begin(), namesNE.end());
    for (int i = 0; i < imgNum; i++){
        CmFile::Copy(inDir + namesNE[i] + ".jpg", outImgDir + namesNE[i] + ".jpg");
        Mat mask1u = CmFile::LoadMask(inDir + namesNE[i] + ".png");
        Vec4i bb = getMaskRange(mask1u); // bounding box of the salient region
        FileStorage fs(annoDir + namesNE[i] + ".yml", FileStorage::WRITE);
        fs << "annotation" << "{" << "object" << "{" << "bndbox" << "{";
        fs << "xmin" << format("'%d'", bb[0]) << "ymin" << format("'%d'", bb[1]);
        fs << "xmax" << format("'%d'", bb[2]) << "ymax" << format("'%d'", bb[3]);
        fs << "}" << "name" << "Salient" << "}" << "}";
    }
    // Half/half train-test split (namesNE was shuffled above)
    vecS trainSet(namesNE.begin(), namesNE.begin() + imgNum/2);
    vecS testSet(namesNE.begin() + imgNum/2, namesNE.end());
    CmFile::writeStrList(vocDir + "ImageSets/Main/TrainVal.txt", trainSet);
    CmFile::writeStrList(vocDir + "ImageSets/Main/Test.txt", testSet);
    vecS classNames;
    classNames.push_back("Salient");
    CmFile::writeStrList(vocDir + "ImageSets/Main/Class.txt", classNames);
    return true;
}
void webMain()
{
    testSet();
    testMultiSet();
    testUnorderedSet();
    testPointerSet();
    testUnorderedSetOfPointers();
}
int main()
{
    srand(time(0));
    if (!testSet()) return 1;
    if (!testMap()) return 1;
    cout << "LockHashMap test succ\n";
    return 0;
}
void testGet()
{
    testSet();
    for (int i = 0; i < 8; i++){
        if (vector_->Get(i)){
            LOG4CXX_INFO(Sp::core_logger, "true");
        } else {
            LOG4CXX_INFO(Sp::core_logger, "false");
        }
    }
}
int main(int argc, char **argv)
{
    testTime();
    printf("\n");
    testClock();
    printf("\n");
    testSysTime();
    printf("\n");
    // TODO: timeofday
    testSet();
    printf("\n");
    testCopy();
    return 0;
}
UnlabelledClassificationData UnlabelledClassificationData::partition(UINT trainingSizePercentage){
    //Partitions the dataset into a training dataset (which is kept by this instance of the UnlabelledClassificationData) and
    //a testing/validation dataset (which is returned as a new instance of the UnlabelledClassificationData). The trainingSizePercentage
    //therefore sets the size of the data which remains in this instance; the remaining percentage of data is added to
    //the testing/validation dataset

    //The dataset has changed, so flag that any previous cross validation setup will no longer work
    crossValidationSetup = false;
    crossValidationIndexs.clear();

    const UINT numTrainingExamples = (UINT) floor( double(totalNumSamples) / 100.0 * double(trainingSizePercentage) );

    UnlabelledClassificationData trainingSet(numDimensions);
    UnlabelledClassificationData testSet(numDimensions);
    vector< UINT > indexs( totalNumSamples );

    //Create the random partition indices
    Random random;
    UINT indexA = 0;
    UINT indexB = 0;
    UINT temp = 0;
    for(UINT i=0; i<totalNumSamples; i++) indexs[i] = i;
    for(UINT x=0; x<totalNumSamples*1000; x++){
        //Pick two random indices
        indexA = random.getRandomNumberInt(0,totalNumSamples);
        indexB = random.getRandomNumberInt(0,totalNumSamples);

        //Swap the indices
        temp = indexs[ indexA ];
        indexs[ indexA ] = indexs[ indexB ];
        indexs[ indexB ] = temp;
    }

    //Add the data to the training and test sets
    for(UINT i=0; i<numTrainingExamples; i++){
        trainingSet.addSample( data.getRowVector( indexs[i] ) );
    }
    for(UINT i=numTrainingExamples; i<totalNumSamples; i++){
        testSet.addSample( data.getRowVector( indexs[i] ) );
    }

    //Overwrite the training data in this instance with the training data of the trainingSet
    data = trainingSet.getData();
    totalNumSamples = trainingSet.getNumSamples();

    return testSet;
}
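A minimal usage sketch for the partition API above (hedged: this follows the GRT-style conventions visible in the function itself; the 3-dimensional data and the 80% split value are purely illustrative):

// Sketch only: keep 80% of the samples in this instance, receive the
// remaining 20% back as a new dataset. partition() mutates its receiver.
UnlabelledClassificationData data(3);               // 3-dimensional samples
for (UINT i=0; i<100; i++){
    data.addSample( VectorDouble(3, double(i)) );   // assumed sample type
}
UnlabelledClassificationData testSet = data.partition( 80 );
// data now holds ~80 samples, testSet the remaining ~20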
int main(int argc, char **argv)
{
    uint16_t i;
    PQ_PARAM_SET_ID plist[] = {DRAFT_401, DRAFT_439, DRAFT_593, DRAFT_743};
    size_t numParams = sizeof(plist)/sizeof(PQ_PARAM_SET_ID);

    for(i = 0; i < numParams; i++)
    {
        testPack(plist[i]);
        testKeyGen(plist[i]);
        testSet(plist[i]);
    }

    rng_cleanup();
    exit(EXIT_SUCCESS);
}
LabelledRegressionData LabelledRegressionData::partition(const UINT trainingSizePercentage){
    //Partitions the dataset into a training dataset (which is kept by this instance of the LabelledRegressionData) and
    //a testing/validation dataset (which is returned as a new instance of the LabelledRegressionData). The trainingSizePercentage
    //therefore sets the size of the data which remains in this instance; the remaining percentage of data is added to
    //the testing/validation dataset

    const UINT numTrainingExamples = (UINT) floor( double(totalNumSamples) / 100.0 * double(trainingSizePercentage) );

    LabelledRegressionData trainingSet(numInputDimensions,numTargetDimensions);
    LabelledRegressionData testSet(numInputDimensions,numTargetDimensions);
    vector< UINT > indexs( totalNumSamples );

    //Create the random partition indices
    Random random;
    UINT randomIndex = 0;
    for(UINT i=0; i<totalNumSamples; i++) indexs[i] = i;
    for(UINT x=0; x<totalNumSamples; x++){
        randomIndex = random.getRandomNumberInt(0,totalNumSamples);
        SWAP( indexs[ x ], indexs[ randomIndex ] );
    }

    //Add the data to the training and test sets
    for(UINT i=0; i<numTrainingExamples; i++){
        trainingSet.addSample( data[ indexs[i] ].getInputVector(), data[ indexs[i] ].getTargetVector() );
    }
    for(UINT i=numTrainingExamples; i<totalNumSamples; i++){
        testSet.addSample( data[ indexs[i] ].getInputVector(), data[ indexs[i] ].getTargetVector() );
    }

    //Overwrite the training data in this instance with the training data of the trainingSet
    data = trainingSet.getData();
    totalNumSamples = trainingSet.getNumSamples();

    //The dataset has changed, so flag that any previous cross validation setup will no longer work
    crossValidationSetup = false;
    crossValidationIndexs.clear();

    return testSet;
}
void create( size_t countCards, size_t countSets, std::vector<card>& cards, std::vector<set>& sets )
{
    // Keep drawing random hands until one contains exactly countSets valid sets
    while( true )
    {
        sets.clear();
        cards.clear();
        std::random_shuffle( _cards.begin(), _cards.end() );
        for( size_t f = 0; f < countCards; f++ )
            cards.push_back( _cards.at( f ) );

        // Test every 3-card combination of the hand
        for( size_t c1 = 0; c1 < cards.size() - 2; c1++ ){
            for( size_t c2 = c1 + 1; c2 < cards.size() - 1; c2++ ){
                for( size_t c3 = c2 + 1; c3 < cards.size(); c3++ ){
                    if( testSet( &cards.at( c1 ), &cards.at( c2 ), &cards.at( c3 ) ) )
                    {
                        set s;
                        s.index.push_back( c1 );
                        s.index.push_back( c2 );
                        s.index.push_back( c3 );
                        sets.push_back( s );
                    }
                }
            }
        }

        if( sets.size() == countSets )
            return;
    }
}
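create() above relies on a testSet predicate over three cards that the snippet does not include. A plausible sketch follows, assuming each card holds four attributes valued 0-2 as in the card game SET; the struct layout and attribute count are assumptions for illustration, not the original definitions:

// Hypothetical helper: three cards form a set iff, for every attribute,
// the values are all equal or all different. With values in {0,1,2} both
// cases reduce to the attribute sum being divisible by 3.
struct card { int attr[4]; }; // assumed layout

bool testSet( const card* a, const card* b, const card* c )
{
    for( int i = 0; i < 4; i++ )
        if( (a->attr[i] + b->attr[i] + c->attr[i]) % 3 != 0 )
            return false;
    return true;
}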
int main(int argc, char** argv)
{
#ifdef PARALLEL_CORES
    omp_set_num_threads(PARALLEL_CORES);
#endif
    std::string directory = "./";
    if(argc > 1)
        directory = std::string(argv[1]);

    IDXLoader loader(28, 28, 10000, 1, directory);
    OpenANN::DirectStorageDataSet trainSet(&loader.trainingInput, &loader.trainingInput);

    int H = 196;
    OpenANN::SparseAutoEncoder sae(loader.D, H, 3.0, 0.1, 3e-3, OpenANN::LOGISTIC);
    sae.trainingSet(trainSet);

    OpenANN::LBFGS optimizer(20);
    OpenANN::StoppingCriteria stop;
    stop.maximalIterations = 400;
    optimizer.setOptimizable(sae);
    optimizer.setStopCriteria(stop);
    optimizer.optimize();

    OpenANN::MulticlassEvaluator evaluator(1, OpenANN::Logger::FILE);
    OpenANN::DirectStorageDataSet testSet(&loader.testInput, &loader.testInput, &evaluator);
    sae.validationSet(testSet);

    QApplication app(argc, argv);
    SparseAutoEncoderVisualization visual(sae, trainSet, H, 5, 7, 800, 600);
    visual.show();
    visual.resize(800, 600);
    return app.exec();
}
int main(int argc, char **argv)
{
    OptionParser opts;

    string mapFile, evidFile; //interactFile, ignoreFile;
    int factor;

    opts.addOption(new StringOption("map",
                "--map <filename> : map file",
                "../input/grid.bmp", mapFile, false));
    opts.addOption(new StringOption("evidence",
                "--evidence <filename> : evidence file",
                "", evidFile, true));
    opts.addOption(new IntOption("factor",
                "--factor <int> : scaling factor",
                1, factor, true));

    opts.parse(argc, argv);

    cout << "Loading Map File" << endl;
    BMPFile bmpFile(mapFile);
    Grid grid(bmpFile, black);
//  cout << "xdim: " << grid.dims().first << " yDim: " << grid.dims().second << endl;

    cout << "Loading Evidence" << endl;
    //Evidence trainSet(evidFile, grid, factor);

    /* used when needing to train two separate models
    Evidence evid_int(interactFile, grid, factor);
    Evidence evid_ig(ignoreFile, grid, factor);
    Evidence train_int(grid), test_int(grid), train_ig(grid), test_ig(grid);
    evid_int.split(train_int, test_int, 0.05);
    evid_ig.split(train_ig, test_ig, 0.05);
    */

    Evidence evid(evidFile, grid, factor);
    Evidence trainSet(grid), testSet(grid);
    evid.split(trainSet, testSet, 0.05);
    cout << "Optimize over " << trainSet.size() << " examples" << endl;

#if 0
    for (int i=0; i < evid.size(); i++) {
        cout << "Evid " << i << endl;
        vector<pair<int, int> > traj = evid.at(i);
        vector<double> timestamps = evid.getTimes(i);
        cout << timestamps.size() << " " << traj.size() << endl;
        for (int j=0; j < traj.size(); j++) {
            cout << timestamps.at(j) << " " << traj.at(j).first
                 << " " << traj.at(j).second << endl;
        }
    }
#endif
//  testSet.write("testTraj.data");

    cout << "Generating Feature Set" << endl;
    vector<PosFeature> features;

    cout << "  Constant Feature" << endl;
    ConstantFeature constFeat(grid);
    features.push_back(constFeat);

    cout << "  Obstacle Feature" << endl;
    ObstacleFeature obsFeat(grid);
    features.push_back(obsFeat);

    for (int i=1; i < 5; i++) {
        cout << "  Blur Feature " << i << endl;
        ObstacleBlurFeature blurFeat(grid, 5*i);
        features.push_back(blurFeat);
    }

    /*
    cout << "  Robot Feature" << endl;
    RobotGlobalFeature robglobal(grid, snackbot, factor);
    features.push_back(robglobal);
    // robot local blurred features
    for (int i=1; i < 5; i++) {
        cout << "  RobotBlur Feature " << i << endl;
        RobotLocalBlurFeature robblur(grid, snackbot, 5*i, factor);
        features.push_back(robblur);
    }
    */

    /*
    cout << "  Creating feature array" << endl;
    FeatureArray featArray2(features);
    cout << "  Creating lower resolution feature array" << endl;
    FeatureArray featArray(featArray2, factor);
    */

    cout << "  Speed Feature" << endl;
    vector<double> speedTable(2, 0.0);
    speedTable.at(1) = 0.75;
    //speedTable.at(2) = 1.1;
    DisVecSeqFeature speedfeat(speedTable);

    /* Robset training weights:
     *  -3.83 -8.35991 -2.6512 -5.43475 -3.15203 -3.29758
     *   0.596987 0.439284
     *   0.589445 -0.82448
     * Non-robot-ending training weights:
     *  -4.57257 -6.2 -0.3537 -2.7385 -0.9357 -0.2797
     *  -0.495205 -0.2863
     *  -1.2225 0.43993
     */
    vector<double> weights(6+2+2, -0.0);
    weights.at(0) = -25;
    weights.at(1) = -8.36;
    weights.at(2) = -2.65;
    weights.at(3) = -5.43;
    weights.at(4) = -3.17;
    weights.at(5) = -3.34;
    weights.at(6) = 0.5;   // robot feature
    weights.at(7) = 0.3;   // robot feature
    weights.at(8) = -0.29; // velocity feature
    weights.at(9) = -1.11; // velocity feature
    //weights.push_back(1.5); // the last parameter is for the velocity feature

    Parameters params(weights);

    DisSeqOrderInferEngine engine(8, InferenceEngine::GRID8);

    trajOptimizerplus optimizer(grid, trainSet, features, speedfeat, engine);
    optimizer.optimize(params, 0.005, 1000, 1.0, OPT_EXP);

    return 0;
}
TimeSeriesClassificationData TimeSeriesClassificationData::split(const UINT trainingSizePercentage, const bool useStratifiedSampling){
    //Partitions the dataset into a training dataset (which is kept by this instance of the TimeSeriesClassificationData) and
    //a testing/validation dataset (which is returned as a new instance of the TimeSeriesClassificationData). The trainingSizePercentage
    //therefore sets the size of the data which remains in this instance; the remaining percentage of data is added to
    //the testing/validation dataset

    //The dataset has changed, so flag that any previous cross validation setup will no longer work
    crossValidationSetup = false;
    crossValidationIndexs.clear();

    TimeSeriesClassificationData trainingSet(numDimensions);
    TimeSeriesClassificationData testSet(numDimensions);
    trainingSet.setAllowNullGestureClass(allowNullGestureClass);
    testSet.setAllowNullGestureClass(allowNullGestureClass);
    Vector< UINT > indexs( totalNumSamples );

    //Create the random partition indices
    Random random;
    UINT randomIndex = 0;

    if( useStratifiedSampling ){
        //Break the data into separate classes
        Vector< Vector< UINT > > classData( getNumClasses() );

        //Add the indices to their respective classes
        for(UINT i=0; i<totalNumSamples; i++){
            classData[ getClassLabelIndexValue( data[i].getClassLabel() ) ].push_back( i );
        }

        //Randomize the order of the indices in each of the class index buffers
        for(UINT k=0; k<getNumClasses(); k++){
            UINT numSamples = (UINT)classData[k].size();
            for(UINT x=0; x<numSamples; x++){
                //Pick a random index
                randomIndex = random.getRandomNumberInt(0,numSamples);

                //Swap the indices
                SWAP( classData[k][ x ], classData[k][ randomIndex ] );
            }
        }

        //Loop over each class and add the data to the trainingSet and testSet
        for(UINT k=0; k<getNumClasses(); k++){
            UINT numTrainingExamples = (UINT) floor( Float(classData[k].size()) / 100.0 * Float(trainingSizePercentage) );

            //Add the data to the training and test sets
            for(UINT i=0; i<numTrainingExamples; i++){
                trainingSet.addSample( data[ classData[k][i] ].getClassLabel(), data[ classData[k][i] ].getData() );
            }
            for(UINT i=numTrainingExamples; i<classData[k].size(); i++){
                testSet.addSample( data[ classData[k][i] ].getClassLabel(), data[ classData[k][i] ].getData() );
            }
        }

        //Overwrite the training data in this instance with the training data of the trainingSet
        data = trainingSet.getClassificationData();
        totalNumSamples = trainingSet.getNumSamples();
    }else{
        const UINT numTrainingExamples = (UINT) floor( Float(totalNumSamples) / 100.0 * Float(trainingSizePercentage) );

        //Create the random partition indices
        Random random;
        for(UINT i=0; i<totalNumSamples; i++) indexs[i] = i;
        for(UINT x=0; x<totalNumSamples; x++){
            //Pick a random index
            randomIndex = random.getRandomNumberInt(0,totalNumSamples);

            //Swap the indices
            SWAP( indexs[ x ], indexs[ randomIndex ] );
        }

        //Add the data to the training and test sets
        for(UINT i=0; i<numTrainingExamples; i++){
            trainingSet.addSample( data[ indexs[i] ].getClassLabel(), data[ indexs[i] ].getData() );
        }
        for(UINT i=numTrainingExamples; i<totalNumSamples; i++){
            testSet.addSample( data[ indexs[i] ].getClassLabel(), data[ indexs[i] ].getData() );
        }

        //Overwrite the training data in this instance with the training data of the trainingSet
        data = trainingSet.getClassificationData();
        totalNumSamples = trainingSet.getNumSamples();
    }

    return testSet;
}
int main (int argc, char ** argv)
{
    printf ("MATHCHECK TESTS\n");
    printf ("==================\n\n");

    init (argc, argv);

    KeySet * ks = create_ks ("153", "== + ../bla/val1 + ../bla/val2 ../bla/val3");
    test (ks, 1);
    ksDel (ks);

    ks = create_ks ("250", "< + ../bla/val1 + ../bla/val2 ../bla/val3");
    test (ks, (-1));
    ksDel (ks);

    ks = create_ks ("250", ">= + @/bla/val1 + @/bla/val2 @/bla/val3");
    test (ks, 1);
    ksDel (ks);

    ks = create_ks ("2", "== / @/bla/val1 @/bla/val2");
    test (ks, 1);
    ksDel (ks);

    ks = create_ks ("", ":= / @/bla/val1 @/bla/val2");
    testSet (ks, "2");
    ksDel (ks);

    ks = create_ks ("1", "== / ../bla/val1 ../bla/val3");
    test (ks, (-1));
    ksDel (ks);

    ks = create_ks ("3", "== + '1.5' '1.5'");
    test (ks, 1);
    ksDel (ks);

    ks = create_ks ("4.5", "== + '1.5' + '1.5' '1.5'");
    test (ks, 1);
    ksDel (ks);

    ks = create_ks ("", ":= + '1.5' + '1.5' '1.5'");
    testSet (ks, "4.5");
    ksDel (ks);

    ks = create_ks ("1", "== + '1.5' '1.5'");
    test (ks, (-1));
    ksDel (ks);

    ks = create_ks ("10", "== + ../bla/val3 '7'");
    test (ks, 1);
    ksDel (ks);

    ks = create_ks ("7", "== + @/bla/nonExisting '7'");
    test (ks, 1);
    ksDel (ks);

    ks = create_ks ("", ":= + @/bla/nonExisting '7'");
    testSet (ks, "7");
    ksDel (ks);

    ks = create_ks ("7", "== * @/bla/nonExisting '7'");
    test (ks, 1);
    ksDel (ks);

    ks = create_ks ("3", "== + ../bla/nonExisting + ../bla/nonExistingToo ../bla/val3");
    test (ks, 1);
    ksDel (ks);

    ks = create_ks ("", ":= + ../bla/nonExisting + ../bla/nonExistingToo ../bla/val3");
    testSet (ks, "3");
    ksDel (ks);

    ks = create_ks ("3", "== / @/bla/nonExisting / ../bla/nonExistingToo @/bla/val3");
    test (ks, 1);
    ksDel (ks);

    ks = create_ks ("3", "== + @/bla/nonExisting / ../bla/val3 ../bla/nonExistingToo");
    test (ks, 1);
    ksDel (ks);

    test_multiUp ();

    printf ("\ntestmod_mathcheck RESULTS: %d test(s) done. %d error(s).\n", nbTest, nbError);

    char buffer[24];
    elektraFtoA (buffer, sizeof (buffer), (1.5));
    succeed_if (!(strcmp (buffer, "1.5")), "elektraFtoA failed");
    fprintf (stderr, "elektraFtoA: val: %g, ret: %s\n", (1.5), buffer);
    fprintf (stderr, "elektraEFtoF: string: %s, ret: %g\n", buffer, elektraEFtoF (buffer));
    succeed_if ((elektraEFtoF (buffer) - (1.5)) < 0.00001, "elektraEFtoF failed");

    return nbError;
}
void testSimple()
{
    testSet(false, 10000, true);
    testSet(true, 10000, true);
}
ClassificationData ClassificationData::split(const UINT trainingSizePercentage, const bool useStratifiedSampling){
    //Partitions the dataset into a training dataset (which is kept by this instance of the ClassificationData) and
    //a testing/validation dataset (which is returned as a new instance of the ClassificationData). The trainingSizePercentage
    //therefore sets the size of the data which remains in this instance; the remaining percentage of data is added to
    //the testing/validation dataset

    //The dataset has changed, so flag that any previous cross validation setup will no longer work
    crossValidationSetup = false;
    crossValidationIndexs.clear();

    ClassificationData trainingSet(numDimensions);
    ClassificationData testSet(numDimensions);
    trainingSet.setAllowNullGestureClass( allowNullGestureClass );
    testSet.setAllowNullGestureClass( allowNullGestureClass );

    const UINT K = getNumClasses();

    if( useStratifiedSampling ){
        //Break the data into separate classes
        Vector< Vector< UINT > > classData( K );

        //Add the indices to their respective classes
        for(UINT i=0; i<totalNumSamples; i++){
            classData[ getClassLabelIndexValue( data[i].getClassLabel() ) ].push_back( i );
        }

        //Randomize the order of the indices in each of the class index buffers
        for(UINT k=0; k<K; k++){
            std::random_shuffle(classData[k].begin(), classData[k].end());
        }

        //Reserve the memory
        UINT numTrainingSamples = 0;
        UINT numTestSamples = 0;
        for(UINT k=0; k<K; k++){
            UINT numTrainingExamples = (UINT) floor( Float(classData[k].size()) / 100.0 * Float(trainingSizePercentage) );
            UINT numTestExamples = ((UINT)classData[k].size()) - numTrainingExamples;
            numTrainingSamples += numTrainingExamples;
            numTestSamples += numTestExamples;
        }
        trainingSet.reserve( numTrainingSamples );
        testSet.reserve( numTestSamples );

        //Loop over each class and add the data to the trainingSet and testSet
        for(UINT k=0; k<K; k++){
            UINT numTrainingExamples = (UINT) floor( Float(classData[k].getSize()) / 100.0 * Float(trainingSizePercentage) );

            //Add the data to the training and test sets
            for(UINT i=0; i<numTrainingExamples; i++){
                trainingSet.addSample( data[ classData[k][i] ].getClassLabel(), data[ classData[k][i] ].getSample() );
            }
            for(UINT i=numTrainingExamples; i<classData[k].getSize(); i++){
                testSet.addSample( data[ classData[k][i] ].getClassLabel(), data[ classData[k][i] ].getSample() );
            }
        }
    }else{
        const UINT numTrainingExamples = (UINT) floor( Float(totalNumSamples) / 100.0 * Float(trainingSizePercentage) );

        //Create the random partition indices
        Vector< UINT > indexs( totalNumSamples );
        for(UINT i=0; i<totalNumSamples; i++) indexs[i] = i;
        std::random_shuffle(indexs.begin(), indexs.end());

        //Reserve the memory
        trainingSet.reserve( numTrainingExamples );
        testSet.reserve( totalNumSamples - numTrainingExamples );

        //Add the data to the training and test sets
        for(UINT i=0; i<numTrainingExamples; i++){
            trainingSet.addSample( data[ indexs[i] ].getClassLabel(), data[ indexs[i] ].getSample() );
        }
        for(UINT i=numTrainingExamples; i<totalNumSamples; i++){
            testSet.addSample( data[ indexs[i] ].getClassLabel(), data[ indexs[i] ].getSample() );
        }
    }

    //Overwrite the training data in this instance with the training data of the trainingSet
    *this = trainingSet;

    //Sort the class labels in this dataset
    sortClassLabels();

    //Sort the class labels of the test dataset
    testSet.sortClassLabels();

    return testSet;
}
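A short usage sketch for the split() method defined above (hedged: the 80% figure and the stratified flag are illustrative, and loading samples is elided):

// Sketch only: stratified 80/20 split, preserving per-class proportions.
// split() keeps the training portion in `data` and returns the test portion.
ClassificationData data;
data.setNumDimensions( 2 );
// ... record or load labelled samples into data ...
ClassificationData testSet = data.split( 80, true ); // 80% stays in data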
string predictDigits(Mat &originalImage)
{
    string numbers = "";
    Mat clon = originalImage.clone();

    // Read the model from the XML file and create the neural network.
    CvANN_MLP nnetwork;
    CvFileStorage* storage = cvOpenFileStorage(
            "/home/andersson/Escritorio/Temporales/neural_network.xml", 0,
            CV_STORAGE_READ);
    CvFileNode *n = cvGetFileNodeByName(storage, 0, "DigitOCR");
    nnetwork.read(storage, n);
    cvReleaseFileStorage(&storage);

    int rows = originalImage.rows;
    int cols = originalImage.cols;

    int lx = 0;
    int ty = 0;
    int by = 0;
    int rx = 0;

    int flag = 0;
    int currentColumn = 1;
    bool temp = false;

    while (!temp) {
        /* Left X */
        for (int i = currentColumn; i < cols; i++) {
            for (int j = 1; j < rows; j++) {
                if (i != (cols - 1)) {
                    if (originalImage.at<uchar> (j, i) == 0) {
                        lx = i;
                        flag = 1;
                        break;
                    }
                } else {
                    temp = true;
                    break;
                }
            }

            if (!temp) {
                if (flag == 1) {
                    flag = 0;
                    break;
                }
            } else {
                break;
            }
        }

        if (temp) {
            continue;
        }

        /* Right X */
        int tempNum;
        for (int i = lx; i < cols; i++) {
            tempNum = 0;
            for (int j = 1; j < rows; j++) {
                if (originalImage.at<uchar> (j, i) == 0) {
                    tempNum += 1;
                }
            }
            if (tempNum == 0) {
                rx = (i - 1);
                break;
            }
        }
        currentColumn = rx + 1;

        /* Top Y */
        for (int i = 1; i < rows; i++) {
            for (int j = lx; j <= rx; j++) {
                if (originalImage.at<uchar> (i, j) == 0) {
                    ty = i;
                    flag = 1;
                    break;
                }
            }
            if (flag == 1) {
                flag = 0;
                break;
            }
        }

        /* Bottom Y */
        for (int i = (rows - 1); i >= 1; i--) {
            for (int j = lx; j <= rx; j++) {
                if (originalImage.at<uchar> (i, j) == 0) {
                    by = i;
                    flag = 1;
                    break;
                }
            }
            if (flag == 1) {
                flag = 0;
                break;
            }
        }

        int width = rx - lx;
        int height = by - ty;

        // Cropping image
        Mat crop(originalImage, Rect(lx, ty, width, height));

        // Cloning image
        Mat splittedImage;
        splittedImage = crop.clone();
        // imwrite("/home/andersson/Escritorio/Temporales/splitted.png", splittedImage);

        // Processing image
        Mat output;
        cv::GaussianBlur(splittedImage, output, cv::Size(5, 5), 0);
        cv::threshold(output, output, 50, ATTRIBUTES - 1, 0);
        cv::Mat scaledDownImage(ROWCOLUMN, ROWCOLUMN, CV_8U, cv::Scalar(0));
        scaleDownImage(output, scaledDownImage);

        int pixelValueArray[ATTRIBUTES];
        cv::Mat testSet(1, ATTRIBUTES, CV_32F);
        // Mat to Pixel Value Array
        convertToPixelValueArray(scaledDownImage, pixelValueArray);
        // Pixel Value Array to Mat CV_32F
        cv::Mat classificationResult(1, CLASSES, CV_32F);
        // Note: the original loop used i <= ATTRIBUTES, reading and writing
        // one element past the end of both buffers; i < ATTRIBUTES is correct.
        for (int i = 0; i < ATTRIBUTES; i++) {
            testSet.at<float> (0, i) = pixelValueArray[i];
        }

        // Predicting the number
        nnetwork.predict(testSet, classificationResult);

        // Selecting the correct response: the class with the maximum activation
        int maxIndex = 0;
        float value = 0.0f;
        float maxValue = classificationResult.at<float> (0, 0);
        for (int index = 1; index < CLASSES; index++) {
            value = classificationResult.at<float> (0, index);
            if (value > maxValue) {
                maxValue = value;
                maxIndex = index;
            }
        }
        printf("Class result: %d\n", maxIndex);
        numbers = numbers + convertIntToString(maxIndex);

        Scalar colorRect = Scalar(0.0, 0.0, 255.0);
        rectangle(clon, Point(lx, ty), Point(rx, by), colorRect, 1, 8, 0);
        namedWindow("Clon", CV_WINDOW_NORMAL);
        imshow("Clon", clon);
        waitKey(0);

        namedWindow("Test", CV_WINDOW_NORMAL);
        imshow("Test", splittedImage);
        waitKey(0);
    }
    imwrite("/home/andersson/Escritorio/Temporales/clon.png", clon);

    return numbers;
}
int main(int argc, char **argv)
{
    OptionParser opts;

    string mapFile, trainFile, testFile;
    int factor = 1;
    double step;

    opts.addOption(new StringOption("map",
                "--map <filename> : map file",
                "../input/grid.bmp", mapFile, false));
    opts.addOption(new StringOption("evidence",
                "--test evidence <filename> : evidence file",
                "", testFile, true));
    opts.addOption(new DoubleOption("step",
                "--step <double> : inference interval",
                1.0, step, true));

    opts.parse(argc, argv);

    JetColorMap jet;
    RGBTRIPLE black = {0,0,0};
    RGBTRIPLE white = {255,255,255};
    RGBTRIPLE red;
    red.R = 255; red.G = 0; red.B = 0;
    RGBTRIPLE blue;
    blue.R = 0; blue.G = 0; blue.B = 255;
    RGBTRIPLE green;
    green.R = 0; green.G = 255; green.B = 0;
    RGBTRIPLE initialColor;
    initialColor.R = 111; initialColor.G = 49; initialColor.B = 152;
    RGBTRIPLE currentColor;
    currentColor.R = 181; currentColor.G = 165; currentColor.B = 213;
    RGBTRIPLE magenta;
    magenta.R = 255; magenta.G = 0; magenta.B = 255;
    RGBTRIPLE cyan;
    cyan.R = 0; cyan.G = 255; cyan.B = 255;
    RGBTRIPLE yellow;
    yellow.R = 255; yellow.G = 255; yellow.B = 0;

    BMPFile bmpFile(mapFile);
    Grid grid(bmpFile, black);

    Evidence testSet(testFile, grid, factor);
//  Evidence trainSet(trainFile, grid, factor);

    pair<int, int> dims = grid.dims();

    cout << " Speed Feature" << endl;
    vector<double> speedTable(VEL_DIM, 0.0);
    speedTable.at(1) = 0.75;
    DisVecSeqFeature speedfeat(speedTable);

    vector<int> dimensions;
    dimensions.push_back(dims.first);
    dimensions.push_back(dims.second);
    dimensions.push_back(VEL_DIM);

    /* ****************************************
     *   INITIALIZE MARKOV DECISION PROCESS
     *   BASED MODEL PARAMETERS
     * ****************************************/
    vector<double> p_weights(NUMPOSFEAT, -0.0);
    p_weights.at(0) = -2.23; //-2.23 for PPP forecast
    p_weights.at(1) = -6.2;
    p_weights.at(2) = -0.35;
    p_weights.at(3) = -2.73;
    p_weights.at(4) = -0.92;
    p_weights.at(5) = -0.26;

    vector<double> r_PosWeights(NUMPOSFEAT+NUMROBFEAT, -0.0);
    r_PosWeights.at(0) = -3.83;
    r_PosWeights.at(1) = -8.36;
    r_PosWeights.at(2) = -2.65;
    r_PosWeights.at(3) = -5.43;
    r_PosWeights.at(4) = -3.15;
    r_PosWeights.at(5) = -3.30;
    //r_PosWeights.at(6) = 0.60;
    //r_PosWeights.at(7) = 0.45;

    vector<double> nr_PosWeights(NUMPOSFEAT+NUMROBFEAT, -0.0);
    nr_PosWeights.at(0) = -4.51;
    nr_PosWeights.at(1) = -6.2;
    nr_PosWeights.at(2) = -0.35;
    nr_PosWeights.at(3) = -2.73;
    nr_PosWeights.at(4) = -0.93;
    nr_PosWeights.at(5) = -0.28;
    //nr_PosWeights.at(6) = -0.50;
    //nr_PosWeights.at(7) = -0.286;

    vector<double> r_SeqWeights(VEL_DIM, -0.0);
    r_SeqWeights.at(0) = 0.59;
    r_SeqWeights.at(1) = -0.83;

    vector<double> nr_SeqWeights(VEL_DIM, -0.0);
    nr_SeqWeights.at(0) = -1.21;
    nr_SeqWeights.at(1) = 0.49;

    Parameters p(p_weights);
    Parameters r_Pos(r_PosWeights);
    Parameters nr_Pos(nr_PosWeights);
    Parameters r_Seq(r_SeqWeights);
    Parameters nr_Seq(nr_SeqWeights);

    /* ****************************************
     *   INITIALIZE LINEAR QUADRATIC CONTROL
     *   BASED MODEL PARAMETERS
     * ****************************************/
    M_6 A;
    A.setZero();
    A(0,0) = 1; A(1,1) = 1; A(4,2) = -1; A(5,3) = -1;
    M_6_2 B;
    B << 1,0,
         0,1,
         1,0,
         0,1,
         1,0,
         0,1;
    M_6 costM;
    ifstream infile("../params/nonrob2000.dat");
    for(int row=0; row<costM.rows(); row++){
        for(int col=0; col<costM.cols(); col++){
            double temp;
            infile >> temp;
            costM(row,col) = temp;
        }
    }
    infile.close();

    M_6 sigma;
    sigma << 0.001,0,0,0,0,0,
             0,0.001,0,0,0,0,
             0,0,0.005,0,0,0,
             0,0,0,0.005,0,0,
             0,0,0,0,0.005,0,
             0,0,0,0,0,0.005;

    /* ****************************************
     *   DECLARATION OF INFERENCE ENGINES
     * ****************************************/
    OrderedWaveInferenceEngine pp(InferenceEngine::GRID8);
    DisSeqOrderInferEngine mdpr(InferenceEngine::GRID8);
    DisSeqOrderInferEngine mdpnr(InferenceEngine::GRID8);
    ContinuousState cState;
    LQControlInference lq(A, B, sigma, costM, cState);
    lq.valueInference();

    IntentRecognizer IR(grid, p, r_Pos, r_Seq, nr_Pos, nr_Seq,
            speedfeat, pp, mdpr, mdpnr, lq);

    cout << testSet.size() << " Examples" << endl;

    for (int i=0; i < testSet.size(); i++) {
        vector<pair<int, int> > & traj = testSet.at(i);
        vector<double> & vels = testSet.at_v(i);
        vector<double> times = testSet.getTimes(i);
        pair<int,int> & botinGrid = testSet.at_bot(i);
        vector<pair<double,double> > & obs = testSet.at_raw(i);
        vector<double> & rawTimes = testSet.at_rawTime(i);

        IR.combineForecast(traj, vels, obs, times, rawTimes,
                botinGrid, i, step);
    }
}
int main(int argc, char **argv)
{
    OptionParser opts;

    string mapFile, evidFile;
    int factor;

    opts.addOption(new StringOption("map",
                "--map <filename> : map file",
                "../input/grid.bmp", mapFile, false));
    opts.addOption(new StringOption("evidence",
                "--evidence <filename> : evidence file",
                "", evidFile, true));
    opts.addOption(new IntOption("factor",
                "--factor <int> : scaling factor",
                1, factor, true));

    opts.parse(argc, argv);

    JetColorMap jet;
    RGBTRIPLE black = {0,0,0};
    RGBTRIPLE white = {255,255,255};
    RGBTRIPLE red;
    red.R = 255; red.G = 0; red.B = 0;
    RGBTRIPLE blue;
    blue.R = 0; blue.G = 0; blue.B = 255;
    RGBTRIPLE green;
    green.R = 0; green.G = 255; green.B = 0;
    RGBTRIPLE initialColor;
    initialColor.R = 111; initialColor.G = 49; initialColor.B = 152;
//  initialColor.G = 152; initialColor.B = 49;
    RGBTRIPLE currentColor;
    currentColor.R = 181; currentColor.G = 165; currentColor.B = 213;
//  currentColor.G = 213; currentColor.B = 165;
    RGBTRIPLE magenta;
    magenta.R = 255; magenta.G = 0; magenta.B = 255;
    RGBTRIPLE cyan;
    cyan.R = 0; cyan.G = 255; cyan.B = 255;
    RGBTRIPLE yellow;
    yellow.R = 255; yellow.G = 255; yellow.B = 0;

    BMPFile bmpFile(mapFile);
    Grid grid(bmpFile, black);

    Evidence testSet(evidFile, grid, factor);
    /*
    if (1) {
        evid.split(trainSet, testSet, 0.8);
    }else{
        evid.deterministicsplit(trainSet, testSet);
    }*/

#if 0
    cout << "Creating Markov Model" << endl;
    MarkovModel markmodel(grid, trainSet);

    double totalObj = 0.0;
    for (int i=0; i < testSet.size(); i++) {
        vector<pair<int, int> > path = testSet.at(i);
        cout << "Calling eval" << endl;
        double obj = markmodel.eval(path);
        cout << "OBJ: " << i << " " << obj << endl;
        totalObj += obj;
    }
    cout << "TOTAL OBJ: " << totalObj << endl;
    cout << "AVERAGE OBJ: " << totalObj/testSet.size() << endl;
    return 0;
#endif

    vector<PosFeature> features;

    cout << "Constant Feature" << endl;
    ConstantFeature constFeat(grid);
    features.push_back(constFeat);

    cout << "Obstacle Feature" << endl;
    ObstacleFeature obsFeat(grid);
    features.push_back(obsFeat);

    for (int i=1; i < 5; i++) {
        cout << "Blur Feature " << i << endl;
        ObstacleBlurFeature blurFeat(grid, 5*i);
        features.push_back(blurFeat);
    }

    cout << "Creating feature array" << endl;
    FeatureArray featArray2(features);

    cout << "Creating lower resolution feature array" << endl;
    FeatureArray featArray(featArray2, factor);

    pair<int, int> dims = grid.dims();
    pair<int, int> lowDims((int)ceil((float)dims.first/factor),
            (int)ceil((float)dims.second/factor));

    vector<double> weights(features.size(), -0.0);
    weights.at(1) = -6.2;
    //for (int i=2; i < weights.size(); i++)
    //    weights.at(i) = -1.0;
    weights.at(0) = -2.23; //-2.23
    weights.at(2) = -0.35;
    weights.at(3) = -2.73;
    weights.at(4) = -0.92;
    weights.at(5) = -0.26;
    Parameters params(weights);

    OrderedWaveInferenceEngine engine(InferenceEngine::GRID8);

    vector<vector<double> > prior(dims.first, vector<double> (dims.second, 0.0));
    /*
    double divide = 1.0;
    vector<double> radiusWeight;
    for (int i=0; i < 20; i++) {
        radiusWeight.push_back(1.0/divide);
        divide*=2;
    }
    generatePrior(grid, trainSet, priorOrig, radiusWeight, factor);
    reducePrior(priorOrig, prior, factor);
    */

    vector<vector<vector<double> > > partition, backpartition;

    int time0 = time(0);

    BMPFile gridView(dims.first, dims.second);

    RewardMap rewards(featArray, params);

    vector<double> sums(params.size(), 0.00001);

    vector<vector<double> > occupancy;

    Predictor predictor(grid, rewards, engine);
    predictor.setPrior(prior);

    cout << testSet.size() << " Examples" << endl;

    for (int i=0; i < testSet.size(); i++) {
        int index = 0;
        vector<pair<int, int> > traj = testSet.at(i);
        vector<double> times = testSet.getTimes(i);
        pair<int, int> initial = traj.front();
        pair<int,int> & botinGrid = testSet.at_bot(i);
        pair<double,double> & botinPoint = testSet.at_rbot(i);
        pair<double,double> & end = testSet.at_raw(i).back();

        predictor.setStart(initial);

        double thresh = -20.0;
        double startTime = times.front();

        char buf[1024];
        sprintf(buf, "../output/pppredict%03d.dat", i);
        ofstream file(buf);

        for (double tick = startTime; index < traj.size(); tick += 0.4) {
            for ( ; index < traj.size() && times.at(index) < tick; index++);

            if (index == traj.size()) break;

            cout << "Evidence: " << i << " timestep: " << tick
                 << " index: " << index << endl;
            predictor.predict(traj.at(index), occupancy);

            cout << "SIZE: " << prior.size() << endl;

            vector<vector<double> > pos = predictor.getPosterior();

            gridView.addBelief(pos, -30.0, 0.0, jet);

            grid.addObstacles(gridView, black);
            gridView.addLabel(botinGrid, green);

            vector<pair<int, int> > subTraj;
            subTraj.insert(subTraj.end(), traj.begin(), traj.begin()+index);
            gridView.addVector(subTraj, red, factor);

            sprintf(buf, "../compare/pp%03d-%03f.bmp", i, tick-startTime);
            gridView.write(buf);

            //pair<double,double> values = predictor.check(traj.back());
            double cost = 0.0;
            for(int itr = 0; itr < index; itr++)
                cost += rewards.at(traj[itr].first, traj[itr].second);

            cout << i << " Normalizer: " << predictor.getNormalizer(traj.back())
                 << " path cost: " << cost
                 << " Probability: " << cost + predictor.getNormalizer(traj.back()) << endl;

            vector<vector<vector<double> > > timeOcc = predictor.getTimeOccupancy();

            vector<vector<double > > posterior = predictor.getPosterior();

            double maxV = -HUGE_VAL;
            pair<int,int> predestGrid;
            pair<double,double> predestPoint;

            for (int ii=0; ii < dims.first; ii++) {
                for (int jj=0; jj < dims.second; jj++) {
                    if (posterior[ii][jj] > maxV) {
                        predestGrid.first = ii;
                        predestGrid.second = jj;
                    }
                    maxV = max(maxV, posterior.at(ii).at(jj));
                }
            }

            predestPoint = grid.grid2Real(predestGrid.first, predestGrid.second);
            double dist = sqrt((end.first-predestPoint.first)*(end.first-predestPoint.first)
                    + (end.second-predestPoint.second)*(end.second-predestPoint.second));

            double logloss = entropy(posterior);

            cout << "final belief: " << posterior.at(traj.back().first).at(traj.back().second)
                 << " max: " << maxV
                 << " logloss: " << logloss << endl;
            cout << botinGrid.first << " " << botinGrid.second
                 << " " << predestGrid.first << " " << predestGrid.second << endl;

            file << tick-startTime
                 << " " << logloss
                 << " " << posterior.at(botinGrid.first).at(botinGrid.second)
                 << " " << posterior.at(traj.back().first).at(traj.back().second)
                 << " " << maxV << " " << dist << endl;
        }
        file.close();
    }
}