/**
 * Build the visual-word vocabulary: gather the feature patches of every
 * image class into one matrix and cluster them with k-means.
 *
 * Returns a matrix with one row per visual word (the cluster centers).
 * Exits the process if no patches could be loaded at all.
 */
cv::Mat findVocabulary(ImageClassList classes) {
    cout << "(VOCABULARY) Loading..." << endl;

    // Accumulate every class's patches into a single feature matrix.
    FEATURE_MATRIX(patches);
    for (ImageClassList::iterator cls = classes.begin(); cls != classes.end(); ++cls)
        cls->loadFeatures(patches);

    cout << "(VOCABULARY) Clustering..." << endl;

    // Never request more clusters than we have samples.
    int clusterCount = min(patches.size().height, VISUAL_WORD_COUNT);
    if (clusterCount == 0) {
        cerr << "Could not load any patches" << endl;
        exit(1);
    }

    Mat clusterLabels;
    Mat vocabulary;
    kmeans(patches,
           clusterCount,
           clusterLabels,
           TermCriteria(CV_TERMCRIT_ITER | CV_TERMCRIT_EPS, 1000, 0.001),
           5,  // attempts
           KMEANS_PP_CENTERS,
           vocabulary);
    return vocabulary;
}
/**
 * Persist the mapping from each class's numeric label to its string name
 * via ClassLabelsFile, so predictions can later be translated back.
 */
void StoreClassLabels(ImageClassList classes) {
    std::map<int, std::string> labelNames;
    for (ImageClassList::iterator cls = classes.begin(); cls != classes.end(); ++cls)
        labelNames[cls->intLabel()] = cls->getLabel();
    ClassLabelsFile::Save(labelNames);
}
void partition(const ImageClassList &source, double fractionA, ImageClassList &targetA, ImageClassList &targetB) { for (ImageClassList::const_iterator iter = source.begin(); iter != source.end(); iter++) { ImageClass copy(*iter); copy.shuffle(); int fraction = static_cast<int>(copy.image_count() * fractionA); ImageClass A(copy); ImageClass B(copy); // Partition into // [...... A ........ | .... B ....] // 0 fraction count B.remove(0, fraction); A.remove(fraction, A.image_count()); targetA.push_back(A); targetB.push_back(B); } }
/**
 * Measure classification accuracy of `svm` over `imageSet`.
 *
 * Every image is expressed as a histogram over the visual-word vocabulary
 * `words`, classified with `svm`, and the prediction is compared (via the
 * float-tolerant EQ macro) against the class's integer label.  Prints the
 * resulting accuracy percentage to stdout.
 *
 * FIX: previously an empty image set divided by zero and printed "nan";
 * that case is now reported explicitly and the function returns early.
 */
void validate(const ImageClassList &imageSet, const cv::Mat &words, const CvSVM &svm) {
    cout << "Validating..." << endl;
    double accurate = 0;
    double total = 0;
    for (ImageClassList::const_iterator cls = imageSet.begin(); cls != imageSet.end(); ++cls) {
        for (ImageList::const_iterator im = cls->images().begin(); im != cls->images().end(); ++im) {
            // Histogram of visual-word occurrences for this image.
            cv::Mat imageWords(0, words.size().height, CV_32F);
            im->loadVisualWords(imageWords, words);
            float prediction = svm.predict(imageWords);
            total++;
            if (EQ(cls->intLabel(), prediction))
                accurate++;
        }
    }
    // Guard: no images means accuracy is undefined (avoid 0/0 -> nan).
    if (total == 0) {
        cerr << "No validation images loaded" << endl;
        return;
    }
    cout.precision(3);
    cout << "Accuracy: " << (accurate / total) * 100 << "%" << endl;
}
/**
 * Train an SVM classifier over bag-of-visual-words histograms.
 *
 * Each image in `classes` is converted to a histogram over the vocabulary
 * `words` (one training row per image, with its class label), then an RBF
 * C-SVC model is fitted with train_auto.
 */
CvSVM trainClassifier(const cv::Mat &words, ImageClassList classes) {
    // Express every image as a histogram of visual words, retaining class labels.
    cv::Mat trainingSet(0, words.size().height, CV_32F);
    // NOTE(review): CvSVM expects CV_32S/CV_32F responses; verify that
    // loadVisualWords handles the CV_8U label matrix correctly.
    cv::Mat trainingLabels(0, 1, CV_8U);

    cout << "( CLASSIF. ) Loading..." << endl;
    for (ImageClassList::iterator cls = classes.begin(); cls != classes.end(); ++cls)
        cls->loadVisualWords(trainingSet, trainingLabels, words);

    // SVM configuration: C-support classification with an RBF kernel.
    CvSVMParams params;
    params.svm_type    = CvSVM::C_SVC;
    params.kernel_type = CvSVM::RBF;
    params.gamma       = SVM_GAMMA;
    params.term_crit   = cvTermCriteria(CV_TERMCRIT_ITER, 100, 1e-6);

    // train_auto cross-validates over the remaining free parameters.
    CvSVM svm;
    svm.train_auto(trainingSet, trainingLabels, Mat(), Mat(), params);
    return svm;
}
void ParameterJob::CrossValidation (ImageFeaturesList& images, ImageClassList& imageClasses, String svmParamStr, int numOfFolds, ConfussionMatrix2& results, double& numOfSupportVectors, double& avgProbability, double& avgKnowClassProb, double& avgCompact, double& compactDev ) { log.Level (10) << "ParameterJob::CrossValidation" << endl; int correct = 0; int imageCount = images.QueueSize (); int classNum = 0; int total = 0; numOfSupportVectors = 0.0; avgProbability = 0.0; avgKnowClassProb = 0.0; avgCompact = 0.0; double predClassProb; double knownClassProb; double smallestDistance; double compact; double* compactTable = new double[imageCount]; int compactIDX = 0; ImageClassListIterator icIDX (imageClasses); for (icIDX.Reset (); icIDX.CurPtr (); ++icIDX) { icIDX->Num (classNum); classNum++; } int numImagesPerFold = (imageCount + numOfFolds - 1) / numOfFolds; int firstInGroup = 0; for (int foldNum = 0; foldNum < numOfFolds; foldNum++) { cout << "Job ID[" << jobId << "] Fold[" << foldNum << "]." << endl; int lastInGroup; // If We are doing the last Fold Make sure that we are including all the images // that have not been tested. if (foldNum == (numOfFolds - 1)) lastInGroup = imageCount; else lastInGroup = firstInGroup + numImagesPerFold - 1; ImageFeaturesListPtr trainingImages = new ImageFeaturesList (false, log); ImageFeaturesListPtr testImages = new ImageFeaturesList (false, log); log.Level (20) << "Fold Num[" << foldNum << "] " << "FirstTestImage[" << firstInGroup << "] " << "LastInGroup[" << lastInGroup << "]." 
<< endl; ImageFeaturesPtr image = NULL; for (int x = 0; x < imageCount; x++) { image = images.IdxToPtr (x); if (image == NULL) { cout << "*" << endl; } if ((x >= firstInGroup) && (x <= lastInGroup)) { testImages->AddSingleImageFeatures (image); } else { trainingImages->AddSingleImageFeatures (image); } } String featureNumStr ("0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72"); FeatureNumList selectedFeatures (IncludeFeatureNums, featureNumStr); bool validParms; SVMparam svmParam (svmParamStr, selectedFeatures, log, validParms); svmParam.A_Param (aParm); svmParam.C_Param (cParm); svmParam.Gamma_Param (gammaParm); SVMModelPtr model = new SVMModel (svmParam, *trainingImages, imageClasses, log); numOfSupportVectors += model->NumOfSupportVectors (); ImageFeaturesListIterator tiIDX (*testImages); for (tiIDX.Reset (); tiIDX.CurPtr (); ++tiIDX) { ImageClassPtr knownClass = tiIDX->ImageClass (); //ImageClassPtr predictedClass = model->Predict (tiIDX.CurPtr ()); compact = 0; double compact; int numOfWinners; bool knownClassOneOfTheWinners; float probability1; // 5 Other Ways to look at Probability, float probability2; // 5 Other Ways to look at Probability, float probability3; // Impolmented for some Active Learning float probability4; // expirements. float probability5; float probability6; ImageClassPtr predictedClass = model->Predict (tiIDX.CurPtr (), knownClass, predClassProb, knownClassProb, smallestDistance, compact, numOfWinners, knownClassOneOfTheWinners, probability1, // 5 Other Ways to look at Probability, probability2, // 5 Other Ways to look at Probability, probability3, // Impolmented for some Active Learning probability4, // expirements. 
probability5, probability6 ); if (compact > 10000000000.0) compact = 10000000000.0; predictedClass = imageClasses.LookUpByName (predictedClass->Name ()); total++; if (knownClass->UpperName () == predictedClass->UpperName ()) { correct++; } results.Increment (knownClass, predictedClass, (int)tiIDX->OrigSize (), predClassProb, smallestDistance ); avgProbability += predClassProb; avgKnowClassProb += knownClassProb; avgCompact += compact; compactTable[compactIDX] = compact; compactIDX++; } delete model; delete trainingImages; delete testImages; firstInGroup = firstInGroup + numImagesPerFold; } // foldNum numOfSupportVectors = numOfSupportVectors / numOfFolds; avgProbability = avgProbability / imageCount; avgKnowClassProb = avgKnowClassProb / imageCount; avgCompact = avgCompact / imageCount; accuracy = 100.0 * (double)correct / (double)total; // Lets Calc compat Stats double compactVar = 0.0; double delta; for (compactIDX = 0; compactIDX < imageCount; compactIDX++) { delta = (avgCompact - compactTable[compactIDX]); compactVar += (delta * delta); } compactVar = sqrt (compactVar); compactDev = compactVar / imageCount; delete compactTable; cout << "Job ID[" << jobId << "] Accuracy[" << StrFormatDouble (accuracy, "zz0.000") << "%]." << endl; } /* CrossValidation */
/**
 * @brief  Parse a ParameterProcess from its description string.
 *
 * Expected layout (fields separated by space, comma, or tab):
 *   <status-char> <processNum> <jobId> <class0Name> <class1Name>
 *   <cParm> <gammaParm> <aParm> <highestAccuracy>
 *
 * The status character must be '0', '1', or '2'.  A class name of "NoName"
 * means no class pair is associated with this process.  The process exits
 * with -1 on an invalid status or an unknown class name.
 *
 * FIX: the error message for a bad class-1 name previously said "Class0".
 */
ParameterProcess::ParameterProcess (RunLog&         _log,
                                    ImageClassList& imageClasses,
                                    String          _desc
                                   ):
  log (_log)
{
  highestAccuracy = (FFLOAT)0.0;
  startTime = 0;

  _desc.TrimLeft ();
  curStatus = _desc.ExtractChar ();
  if ((curStatus == '0')  ||
      (curStatus == '1')  ||
      (curStatus == '2')
     )
  {
    // We Are Ok
  }
  else
  {
    log.Level (-1) << endl
                   << "ParameterProcess: We have an invalid Cur Status[" << curStatus << "]."
                   << endl;
    exit (-1);
  }

  {
    String processsNumField = _desc.ExtractToken (" ,\t");
    processNum = atoi (processsNumField.Str ());
  }

  {
    String jobIdField = _desc.ExtractToken (" ,\t");
    jobId = atoi (jobIdField.Str ());
  }

  {
    // Extract the two classes that this process runs for.
    String class0Name = _desc.ExtractToken (" ,\t");
    String class1Name = _desc.ExtractToken (" ,\t");
    if (class0Name == "NoName")
    {
      class0 = class1 = NULL;
    }
    else
    {
      class0 = imageClasses.LookUpByName (class0Name);
      class1 = imageClasses.LookUpByName (class1Name);
      if (!class0)
      {
        cerr << endl
             << "*** ERROR *** ParameterProcess, Invalid Class0[" << class0Name << "]."
             << endl << endl;
        exit (-1);
      }
      if (!class1)
      {
        cerr << endl
             << "*** ERROR *** ParameterProcess, Invalid Class1[" << class1Name << "]."
             << endl << endl;
        exit (-1);
      }
    }
  }

  {
    String cParmStr = _desc.ExtractToken (" ,\t");
    cParm = atoi (cParmStr.Str ());
  }

  {
    String gammaParmStr = _desc.ExtractToken (" ,\t");
    gammaParm = atof (gammaParmStr.Str ());
  }

  {
    String aParmStr = _desc.ExtractToken (" ,\t");
    aParm = atoi (aParmStr.Str ());
  }

  {
    String highestAccuracyField = _desc.ExtractToken (" ,\t");
    highestAccuracy = (FFLOAT)atof (highestAccuracyField.Str ());
  }
}
void ParameterJob::CrossValidation (ImageFeaturesList& images, ImageClassList& imageClasses, FeatureNumList& selectedFeatures, KKStr svmParamStr, int numOfFolds, ConfussionMatrix2& results, double& numOfSupportVectors, float& avgProbability, float& avgKnowClassProb, double& avgCompact, double& compactDev ) { log.Level (10) << "ParameterJob::CrossValidation" << endl; int correct = 0; int imageCount = images.QueueSize (); int total = 0; FileDescPtr fileDesc = images.FileDesc (); numOfSupportVectors = 0.0; avgProbability = 0.0; avgKnowClassProb = 0.0; avgCompact = 0.0; trainTime = 0.0f; classTime = 0.0f; numSVs = 0; float predClassProb; float knownClassProb; double smallestDistance; double compact; double* compactTable = new double[imageCount]; int compactIDX = 0; // Creae a default assignments based of current odering // of imageClasses. ClassAssignments assignments (imageClasses, log); int numImagesPerFold = (imageCount + numOfFolds - 1) / numOfFolds; int firstInGroup = 0; for (int foldNum = 0; foldNum < numOfFolds; foldNum++) { cout << "Job ID[" << jobId << "] Fold[" << foldNum << "]." << endl; int lastInGroup; // If We are doing the last Fold Make sure that we are including all the images // that have not been tested. if (foldNum == (numOfFolds - 1)) lastInGroup = imageCount; else lastInGroup = firstInGroup + numImagesPerFold - 1; ImageFeaturesListPtr trainingImages = new ImageFeaturesList (fileDesc, false, log); ImageFeaturesListPtr testImages = new ImageFeaturesList (fileDesc, false, log); log.Level (20) << "Fold Num[" << foldNum << "] " << "FirstTestImage[" << firstInGroup << "] " << "LastInGroup[" << lastInGroup << "]." 
<< endl; ImageFeaturesPtr image = NULL; for (int x = 0; x < imageCount; x++) { image = images.IdxToPtr (x); if (image == NULL) { cout << "*" << endl; } if ((x >= firstInGroup) && (x <= lastInGroup)) { testImages->AddSingleImageFeatures (image); } else { trainingImages->AddSingleImageFeatures (image); } } bool validParms; SVMparam svmParam (svmParamStr, selectedFeatures, fileDesc, log, validParms); svmParam.A_Param (aParm); svmParam.C_Param (cParm); svmParam.Gamma_Param (gammaParm); svmParam.SelectedFeatures (selectedFeatures); SVMModelPtr model = new SVMModel (svmParam, *trainingImages, assignments, fileDesc, log, cancelFlag); trainTime += (float)model->TrainingTime (); numOfSupportVectors += model->NumOfSupportVectors (); numSVs += model->NumOfSupportVectors (); ImageFeaturesListIterator tiIDX (*testImages); double classStartTime = osGetSystemTimeUsed (); for (tiIDX.Reset (); tiIDX.CurPtr (); ++tiIDX) { ImageClassPtr knownClass = tiIDX->ImageClass (); //ImageClassPtr predictedClass = model->Predict (tiIDX.CurPtr ()); compact = 0; int numOfWinners; bool knownClassOneOfTheWinners; float probability1; // 5 Other Ways to look at Probability, float probability2; // 5 Other Ways to look at Probability, float probability3; // Impolmented for some Active Learning float probability4; // expirements. float probability5; float probability6; ImageClassPtr predictedClass = model->Predict (tiIDX.CurPtr (), knownClass, predClassProb, knownClassProb, smallestDistance, compact, numOfWinners, knownClassOneOfTheWinners, probability1, // 5 Other Ways to look at Probability, probability2, // 5 Other Ways to look at Probability, probability3, // Impolmented for some Active Learning probability4, // expirements. 
probability5, probability6 ); if (compact > 10000000000.0) compact = 10000000000.0; predictedClass = imageClasses.LookUpByName (predictedClass->Name ()); total++; if (knownClass->UpperName () == predictedClass->UpperName ()) { correct++; } results.Increment (knownClass, predictedClass, (int)tiIDX->OrigSize (), predClassProb ); avgProbability += predClassProb; avgKnowClassProb += knownClassProb; avgCompact += compact; compactTable[compactIDX] = compact; compactIDX++; } double classEndTime = osGetSystemTimeUsed (); classTime += (float)(classEndTime - classStartTime); delete model; delete trainingImages; delete testImages; firstInGroup = firstInGroup + numImagesPerFold; } // foldNum numOfSupportVectors = numOfSupportVectors / numOfFolds; avgProbability = avgProbability / imageCount; avgKnowClassProb = avgKnowClassProb / imageCount; avgCompact = avgCompact / (double)imageCount; accuracy = 100.0 * (double)correct / (double)total; // Lets Calc compat Stats double compactVar = 0.0; double delta; for (compactIDX = 0; compactIDX < imageCount; compactIDX++) { delta = (avgCompact - compactTable[compactIDX]); compactVar += (delta * delta); } compactVar = compactVar / (double)imageCount; compactDev = sqrt (compactVar);; delete compactTable; cout << "Job ID[" << jobId << "] Accuracy[" << StrFormatDouble (accuracy, "zz0.000") << "%]." << endl; } /* CrossValidation */