std::vector<std::vector<int> > annTrain::getConfusionMatrix() { cv::Mat testOutput; mlp->predict(testSamples, testOutput); // we now have 5 classes std::vector<std::vector<int> > confusionMatrix(5, std::vector<int>(5)); for (int i = 0; i < testOutput.rows; i++) { int predictedClass = getPredictedClass(testOutput.row(i)); int expectedClass = testOutputExpected.at(i); cout << expectedClass << " " << predictedClass; confusionMatrix[expectedClass][predictedClass]++; } return confusionMatrix; }
/**
 * Evaluates the KNN classifier over every example in `exs`.
 *
 * For each test example it builds a per-training-document similarity score
 * from three sources — textual (TF-IDF-style cosine, stored negated so the
 * smallest value is the best match), numerical (min-max-normalized squared
 * euclidean distance) and categorical (credibility-weighted mismatch
 * penalty) — then picks the predicted class from the k best neighbors and
 * accumulates hit/miss statistics, finally reporting macro F1.
 *
 * Side effects: prints progress to stdout, fills the confusion matrix,
 * saves per-example predictions, and (when usingKNNOptimize is set) caches
 * each example's similarity map in saveValues on the first pass.
 *
 * @param exs collection of test examples to classify.
 */
void KNN::test(Examples& exs) {
    TRACE_V(TAG, "test");

    // Statistics accumulated across all examples, keyed by class id.
    map<string, unsigned long long> classHits;     // correct predictions per true class
    map<string, unsigned long long> classMiss;     // wrong predictions per true class
    map<string, unsigned long long> mappedDocs;    // how often each class was predicted
    map<string, unsigned long long> docsPerClass;  // true class frequencies

    int numExamples = 0;
    for (ExampleIterator it = exs.getBegin(); it != exs.getEnd(); it++) {
        numExamples++;
        if (numExamples % 100 == 0)
            cout << "Evaluated: " << numExamples << endl;

        Example ex = *it;
        vector<string> textTokens = ex.getTextTokens();
        vector<int> textFreqTokens = ex.getTextFrequency();
        vector<double> numTokens = ex.getNumericalTokens();
        vector<string> catTokens = ex.getCategoricalTokens();
        string eId = ex.getId();
        string classId = ex.getClass();

        // Per-class squared norm of this test example's weighted term vector;
        // used below to normalize the cosine similarity. Only needed when we
        // cannot reuse a cached similarity map.
        // NOTE(review): textTokens[0..2] appear to be metadata (ids/labels),
        // hence the offset of 3 between textTokens and textFreqTokens —
        // confirm against Example's format.
        map<string, double> examplesTestSize;
        if ((usingKNNOptimize && !valuesSaved) || !usingKNNOptimize) {
            for (unsigned int i = 3; i < textTokens.size(); i++) {
                string termId = textTokens[i];
                int tf = textFreqTokens[i - 3];
                // NOTE(review): assumes stats->getClasses() returns a
                // reference; if it returns by value, begin()/end() would come
                // from different temporaries — confirm.
                for (set<string>::iterator classIt = stats->getClasses().begin();
                     classIt != stats->getClasses().end(); classIt++) {
                    double tfidf = tf * getContentCredibility(termId, *classIt);
                    examplesTestSize[*classIt] += (tfidf * tfidf);
                }
            }
        }

        // Similarity of this test example to each training document.
        // Text scores are stored NEGATED so that ordering by ascending value
        // ranks the most similar documents first (consistent with the
        // distance semantics of the numerical/categorical contributions).
        map<string, double> similarity;
        if (usingKNNOptimize && valuesSaved) {
            // Second (or later) optimized pass: reuse cached similarities.
            similarity = saveValues[eId];
        }
        else {
            // Textual KNN: negated, length-normalized cosine over the
            // inverted index termDocWset (term -> weighted train docs).
            for (unsigned int i = 3; i < textTokens.size(); i++) {
                string termId = textTokens[i];
                // BUG FIX: was textFreqTokens[1-3] (constant index -2,
                // out-of-bounds); the parallel loop above uses i-3.
                int tf = textFreqTokens[i - 3];
                for (set<docWeighted, docWeightedCmp>::iterator termIt = termDocWset[termId].begin();
                     termIt != termDocWset[termId].end(); termIt++) {
                    string trainClass = stats->getTrainClass(termIt->docId);
                    double trainDocSize = docTrainSizes[termIt->docId];
                    double trainTermWeight = termIt->weight;
                    double testTermWeight = tf * getContentCredibility(termId, trainClass);
                    similarity[termIt->docId] +=
                        (-(trainTermWeight / sqrt(trainDocSize) *
                           testTermWeight / sqrt(examplesTestSize[trainClass])));
                    // cout<<"sim = " << similarity[termIt->docId] <<endl;
                }
            }

            // Numerical KNN: squared euclidean distance over min-max
            // normalized features, with overflow clamping.
            for (map<string, vector<double> >::iterator trainIt = exNumTrain.begin();
                 trainIt != exNumTrain.end(); trainIt++) {
                double dist = 0.0;
                for (unsigned int i = 0; i < numTokens.size(); i++) {
                    double a = minMaxNorm(numTokens[i], i);
                    double b = minMaxNorm(exNumTrain[trainIt->first][i], i);
                    double val = (a - b) * (a - b);
                    // Clamp instead of overflowing past double's maximum.
                    if (greaterThan(dist + val, numeric_limits<double>::max())) {
                        dist = numeric_limits<double>::max() - 1.0;
                        break;
                    }
                    dist += val;
                }
                similarity[trainIt->first] += dist;
            }

            // Categorical KNN: each mismatching attribute adds a penalty
            // inversely proportional to that value's credibility for the
            // training document's class.
            for (map<string, vector<string> >::iterator trainIt = exCatTrain.begin();
                 trainIt != exCatTrain.end(); trainIt++) {
                double dist = 0.0;
                for (unsigned int i = 0; i < catTokens.size(); i++) {
                    string trainTok = exCatTrain[trainIt->first][i];
                    string testTok = catTokens[i];
                    double catCred = getCategoricalCredibility(
                        i, testTok, stats->getTrainClass(trainIt->first));
                    if (trainTok != testTok) {
                        dist += 1.0 / (catCred + 1.0);
                    }
                }
                similarity[trainIt->first] += dist;
            }
        }

        // First optimized pass: cache the similarity map for reuse.
        if (!valuesSaved && usingKNNOptimize) {
            saveValues[eId] = similarity;
        }

        // Re-rank by graph credibility (if any) and collect the ordered
        // neighbor set used for prediction.
        set<docWeighted, docWeightedCmp> sim;
        for (map<string, double>::iterator testIt = similarity.begin();
             testIt != similarity.end(); testIt++) {
            double similarityValue = testIt->second;
            for (unsigned int g = 0; g < graphsCredibility.size(); g++) {
                double gsim = getGraphCredibility(g, eId, stats->getTrainClass(testIt->first));
                similarityValue /= (0.5 + gsim);
            }
            // never change this, it is necessary
            docWeighted dw(testIt->first, similarityValue);
            sim.insert(dw);
        }

        string predictedLabel = getPredictedClass(sim);
        computeConfusionMatrix(classId, predictedLabel);
        // if(io->usingPredictionsFile)
        savePrediction(eId, classId, predictedLabel);

        if (predictedLabel == classId) {
            classHits[classId]++;
        }
        else {
            classMiss[classId]++;
        }
        mappedDocs[predictedLabel]++;
        docsPerClass[classId]++;
    }

    // After the first full pass the cache (if enabled) is complete.
    if (valuesSaved == false) {
        valuesSaved = true;
    }

    calculateF1(classHits, classMiss, docsPerClass, mappedDocs);
    // showConfusionMatrix();
}