Example #1
// Continues accumulating the votes in results (used by savePosteriors).
// computeResults() must be called first!
void MDDAGClassifier::continueComputingResults(InputData* pData, vector<BaseLearner*>& weakHypotheses,
        vector< ExampleResults* >& results, int fromIteration, int toIteration)
{
    assert( !weakHypotheses.empty() );

    const int numClasses = pData->getNumClasses();
    const int numExamples = pData->getNumExamples();


    // iterator over all the weak hypotheses
    vector<BaseLearner*>::const_iterator whyIt;
    int t;

    // skip the weak hypotheses already accumulated by computeResults()
    for (whyIt = weakHypotheses.begin(), t = 0;
            whyIt != weakHypotheses.end() && t < fromIteration; ++whyIt, ++t) {}

    // for every weak hypothesis: fromIteration..toIteration
    for (; whyIt != weakHypotheses.end() && t < toIteration; ++whyIt, ++t)
    {
        BaseLearner* currWeakHyp = *whyIt;
        AlphaReal alpha = currWeakHyp->getAlpha();

        // for every point
        for (int i = 0; i < numExamples; ++i)
        {
            // a reference for clarity and speed
            vector<AlphaReal>& currVotesVector = results[i]->getVotesVector();

            // for every class
            for (int l = 0; l < numClasses; ++l)
                currVotesVector[l] += alpha * currWeakHyp->classify(pData, i, l);
        }
    }

}
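A minimal usage sketch (hypothetical calling code; classifier, pData, weakHypotheses, results, k and T are assumptions, not part of the original source) showing how continueComputingResults extends a partial run of computeResults without recomputing the already accumulated votes:

// Hypothetical sketch: accumulate votes for iterations [0, k) first,
// then extend the same results vector up to T.
vector<ExampleResults*> results;
classifier.computeResults(pData, weakHypotheses, results, k);             // iterations [0, k)
classifier.continueComputingResults(pData, weakHypotheses, results, k, T); // iterations [k, T)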
	void DataReader::calculateHypothesesMatrix()
	{		
		cout << "[+] Calculate weak hyp matrix..." << endl;
		const int numExamples = _pCurrentData->getNumExamples();
        const int numClasses = _pCurrentData->getNumClasses();
        
		hypermat& allOutputs = _weakHypothesesMatrices[_pCurrentData];
		allOutputs.resize(numExamples);
		

        cout << "Memory allocation for " << numExamples << " examples, " << _numIterations << " classifiers, and " << numClasses << " classes..." << flush;
		for(int i = 0; i < numExamples; ++i)
		{
			allOutputs[i].resize(_numIterations);
            for (int j = 0; j < _numIterations; ++j) {
                allOutputs[i][j].resize(numClasses, 0.);
            }
		}
        cout << "Done." << endl;
		
        cout << "Computing the weak hyp outputs... " << flush;
		for(int wHypInd = 0; wHypInd < _numIterations; ++wHypInd )
		{

            vector<BaseLearner*>::iterator whypIt;
            for (whypIt = _weakHypotheses[wHypInd].begin(); whypIt != _weakHypotheses[wHypInd].end(); ++whypIt) {
                BaseLearner* currWeakHyp = *whypIt;
                AlphaReal alpha = currWeakHyp->getAlpha();
                
                for(int i = 0; i < numExamples; ++i)
                {
                    for (int l = 0; l < numClasses; ++l)
                    {
                        allOutputs[i][wHypInd][l] += alpha * currWeakHyp->classify(_pCurrentData, i, l);
                    }
                }

            }
        }
								
		cout << "Done." << endl;
	}
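The matrix built above is indexed as allOutputs[example][iteration][class] and holds the alpha-weighted votes of each iteration's weak hypotheses. A minimal sketch (assuming a filled hypermat; numClasses, exampleIdx and T are placeholders in scope) of rebuilding the strong classifier's votes for one example from the cache, as the _isDataStorageMatrix branch of classifyKthWeakLearner does incrementally:

// Hypothetical sketch: sum the precomputed alpha-weighted votes for one example
// over the first T iterations instead of calling classify() again.
vector<AlphaReal> votes(numClasses, 0.);
for (int t = 0; t < T; ++t)
    for (int l = 0; l < numClasses; ++l)
        votes[l] += allOutputs[exampleIdx][t][l];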
Example #3
	// -----------------------------------------------------------------------
	// -----------------------------------------------------------------------
	bool AdaBoostMDPClassifier::classifyTestMDP( int i )
	{
		const int numClasses = _pData->getNumClasses();
		
		// use a stack object: the original new'd ExampleResults was never deleted (memory leak)
		ExampleResults tmpResult( i, numClasses );
		vector<AlphaReal>& currVotesVector = tmpResult.getVotesVector();
		
		for( int j=0; j<_weakHypotheses.size(); j++ )
		{
			
			if (_history[j]) {
				BaseLearner* currWeakHyp = _weakHypotheses[j];
				AlphaReal alpha = currWeakHyp->getAlpha();
				
				// for every class
				for (int l = 0; l < numClasses; ++l)
					currVotesVector[l] += alpha * currWeakHyp->classify(_pTestData, i, l);
			}
		}
		
		
		vector<Label>::const_iterator lIt;
		
		const vector<Label>& labels = _pTestData->getLabels(i);
		
		
		// the vote of the winning negative class
		AlphaReal maxNegClass = -numeric_limits<AlphaReal>::max();
		// the vote of the winning positive class
		AlphaReal minPosClass = numeric_limits<AlphaReal>::max();
		
		
		for ( lIt = labels.begin(); lIt != labels.end(); ++lIt )
		{
			// get the negative winner class
			if ( lIt->y < 0 && currVotesVector[lIt->idx] > maxNegClass )
				maxNegClass = currVotesVector[lIt->idx];
			
			// get the positive winner class
			if ( lIt->y > 0 && currVotesVector[lIt->idx] < minPosClass )
				minPosClass = currVotesVector[lIt->idx];
		}
		
		// if the vote for the worst positive label is lower than the
		// vote for the highest negative label -> error
		if (minPosClass <= maxNegClass)
			return false;
		else {
			return true;
		}
		
	}
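As a worked example of the decision rule above (illustrative numbers): with accumulated votes of +2.1 for the positive label and +1.3 and -0.4 for the negative labels, minPosClass = 2.1 and maxNegClass = 1.3, so minPosClass > maxNegClass and the example counts as correctly classified; had the positive vote been only 1.0, the example would count as an error.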
    vector<AlphaReal> DataReader::getWhypClassification( const int wHypInd, const int instance )
	{
		const int numClasses = _pCurrentData->getNumClasses();
		
        vector<AlphaReal> scoreVector(numClasses);
        
        vector<BaseLearner*>::iterator whypIt;
        for (whypIt = _weakHypotheses[wHypInd].begin(); whypIt != _weakHypotheses[wHypInd].end(); ++whypIt) {
            BaseLearner* currWeakHyp = *whypIt;

            AlphaReal alpha = currWeakHyp->getAlpha();
            
            for (int l = 0; l < numClasses; ++l)
                scoreVector[l] += alpha * currWeakHyp->classify(_pCurrentData, instance, l);
		}
		return scoreVector;
	}
	// -----------------------------------------------------------------------
	// -----------------------------------------------------------------------
	vector<int> DataReader::classifyKthWeakLearner( const int wHypInd, const int instance, ExampleResults* exampleResult )
	{		
		if (_verbose>3) {
			//cout << "Classifying: " << wHypInd << endl;
		}
		
		if ( wHypInd >= _numIterations ) {
            assert(false);
        }
		
		const int numClasses = _pCurrentData->getNumClasses();				
		
		// a reference for clarity and speed
		vector<AlphaReal>& currVotesVector = exampleResult->getVotesVector();
        
        vector<int> ternaryPhis(numClasses);
        
		AlphaReal alpha;
		
		// for every class
		if (_isDataStorageMatrix)
		{
			for (int l = 0; l < numClasses; ++l) {
				currVotesVector[l] += (*_pCurrentMatrix)[instance][wHypInd][l];
                ternaryPhis[l] = (currVotesVector[l] > 0) ? 1 : ((currVotesVector[l] < 0) ? -1 : 0) ;
            }
		}
        else
		{
            vector<BaseLearner*>::iterator whypIt;
            for (whypIt = _weakHypotheses[wHypInd].begin(); whypIt != _weakHypotheses[wHypInd].end(); ++whypIt) {
                BaseLearner* currWeakHyp = *whypIt;
                alpha = currWeakHyp->getAlpha();
                
                for (int l = 0; l < numClasses; ++l) {
                    int vote = currWeakHyp->classify(_pCurrentData, instance, l);
                    currVotesVector[l] += alpha * vote;
                    
                    ternaryPhis[l] = (currVotesVector[l] > 0) ? 1 : ((currVotesVector[l] < 0) ? -1 : 0) ;
                }
            }
		}
		
		return ternaryPhis;
	}
Example #6
 void SoftCascadeLearner::computePosteriors(InputData* pData, vector<BaseLearner*> & weakHypotheses, vector<AlphaReal> & oPosteriors, int positiveLabelIndex)
 {
     const int numExamples = pData->getNumExamples();
     
     oPosteriors.resize(numExamples);
     fill(oPosteriors.begin(), oPosteriors.end(), 0. );
     
     vector<BaseLearner*>::iterator whyIt = weakHypotheses.begin();                          
     for (;whyIt != weakHypotheses.end(); ++whyIt )
     {
         BaseLearner* currWeakHyp = *whyIt;
         AlphaReal alpha = currWeakHyp->getAlpha();
                     
         for (int i = 0; i < numExamples; ++i)
         {
             AlphaReal alphaH = alpha * currWeakHyp->classify(pData, i, positiveLabelIndex);
             oPosteriors[i] += alphaH;
         }                       
     }
 }
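A minimal calling sketch (hypothetical names; learner, pData, weakHypotheses and positiveLabelIndex are assumed to exist, and the zero threshold is only a placeholder) showing how the accumulated posteriors might be turned into binary decisions for the positive class:

// Hypothetical sketch: count how many examples the strong classifier scores
// above zero for the positive label.
vector<AlphaReal> posteriors;
learner.computePosteriors(pData, weakHypotheses, posteriors, positiveLabelIndex);
int numPositive = 0;
for (int i = 0; i < (int)posteriors.size(); ++i)
    if (posteriors[i] > 0)
        ++numPositive;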
	// -----------------------------------------------------------------------
	// -----------------------------------------------------------------------
	double DataReader::classifyKthWeakLearner( const int wHypInd, const int instance, ExampleResults* exampleResult )		
	{		
		if (_verbose>3) {
			//cout << "Classifying: " << wHypInd << endl;
		}
		
		if ( wHypInd >= _numIterations ) return -1.0; // indicating error						
		
		const int numClasses = _pCurrentData->getNumClasses();
		
		BaseLearner* currWeakHyp = _weakHypotheses[wHypInd];
		AlphaReal alpha = currWeakHyp->getAlpha();
		
		// a reference for clarity and speed
		vector<AlphaReal>& currVotesVector = exampleResult->getVotesVector();
		
		// for every class
		for (int l = 0; l < numClasses; ++l)
			currVotesVector[l] += alpha * currWeakHyp->classify(_pCurrentData, instance, l);
		
		return alpha;
	}
Example #8
// Computes the votes of the strong classifier and returns them in results
void MDDAGClassifier::computeResults(InputData* pData, vector<BaseLearner*>& weakHypotheses,
                                     vector< ExampleResults* >& results, int numIterations)
{
    assert( !weakHypotheses.empty() );

    const int numClasses = pData->getNumClasses();
    const int numExamples = pData->getNumExamples();

    // Initialize the output info
    OutputInfo* pOutInfo = NULL;

    if ( !_outputInfoFile.empty() )
    {
        if ( _args.getNumValues("outputinfo") > 1 )
        {
            pOutInfo = new OutputInfo(_args);
        }
        else
        {
            pOutInfo = new OutputInfo(_outputInfoFile, "e01hamauc", false);
        }

    }


    // Creating the results structures. See file Structures.h for the
    // PointResults structure
    results.clear();
    results.reserve(numExamples);
    for (int i = 0; i < numExamples; ++i)
        results.push_back( new ExampleResults(i, numClasses) );

    // iterator over all the weak hypotheses
    vector<BaseLearner*>::const_iterator whyIt;
    int t;

    if ( pOutInfo )
    {
        pOutInfo->initialize( pData );
        pOutInfo->outputHeader(pData->getClassMap(),
                               true, // output iterations
                               false, // output time
                               true // endline
                              );
    }

    // for every weak hypothesis: 1..T
    for (whyIt = weakHypotheses.begin(), t = 0;
            whyIt != weakHypotheses.end() && t < numIterations; ++whyIt, ++t)
    {
        BaseLearner* currWeakHyp = *whyIt;
        AlphaReal alpha = currWeakHyp->getAlpha();

        // for every point
        for (int i = 0; i < numExamples; ++i)
        {
            // a reference for clarity and speed
            vector<AlphaReal>& currVotesVector = results[i]->getVotesVector();

            // for every class
            for (int l = 0; l < numClasses; ++l)
                currVotesVector[l] += alpha * currWeakHyp->classify(pData, i, l);
        }

        // if needed output the step-by-step information
        if ( pOutInfo )
        {
            pOutInfo->outputIteration(t);
            //				pOutInfo->outputError(pData, currWeakHyp);
            //				pOutInfo->outTPRFPR(pData);
            //pOutInfo->outputBalancedError(pData, currWeakHyp);
            //				if ( ( t % 1 ) == 0 ) {
            //					pOutInfo->outputROC(pData);
            //				}

            pOutInfo->outputCustom(pData, currWeakHyp);
            // Margins and edge require an update of the weights,
            // therefore I keep them out for the moment
            //outInfo.outputMargins(pData, currWeakHyp);
            //outInfo.outputEdge(pData, currWeakHyp);
            pOutInfo->endLine();
        }
    }

    if (pOutInfo)
        delete pOutInfo;

}
Example #9
void MDDAGClassifier::saveLikelihoods(const string& dataFileName, const string& shypFileName,
                                      const string& outFileName, int numIterations)
{
    InputData* pData = loadInputData(dataFileName, shypFileName);

    if (_verbose > 0)
        cout << "Loading strong hypothesis..." << flush;

    // The class that loads the weak hypotheses
    UnSerialization us;

    // Where to put the weak hypotheses
    vector<BaseLearner*> weakHypotheses;

    // loads them
    us.loadHypotheses(shypFileName, weakHypotheses, pData);

    // where the results go
    vector< ExampleResults* > results;

    if (_verbose > 0)
        cout << "Classifying..." << flush;

    const int numClasses = pData->getNumClasses();
    const int numExamples = pData->getNumExamples();


    ofstream outFile(outFileName.c_str());
    string exampleName;

    if (_verbose > 0)
        cout << "Output likelihoods..." << flush;

    // get the results
    /////////////////////////////////////////////////////////////////////
    // computeResults( pData, weakHypotheses, results, numIterations );
    assert( !weakHypotheses.empty() );

    // Initialize the output info
    OutputInfo* pOutInfo = NULL;

    if ( !_outputInfoFile.empty() )
        pOutInfo = new OutputInfo(_outputInfoFile, "err");

    // Creating the results structures. See file Structures.h for the
    // PointResults structure
    results.clear();
    results.reserve(numExamples);
    for (int i = 0; i < numExamples; ++i)
        results.push_back( new ExampleResults(i, numClasses) );

    // sum votes for classes
    vector< AlphaReal > votesForExamples( numClasses );
    vector< AlphaReal > expVotesForExamples( numClasses );

    // iterator over all the weak hypotheses
    vector<BaseLearner*>::const_iterator whyIt;
    int t;

    // guard: pOutInfo is NULL when no output-info file was given
    if ( pOutInfo )
        pOutInfo->initialize( pData );

    // for every feature: 1..T
    for (whyIt = weakHypotheses.begin(), t = 0;
            whyIt != weakHypotheses.end() && t < numIterations; ++whyIt, ++t)
    {
        BaseLearner* currWeakHyp = *whyIt;
        AlphaReal alpha = currWeakHyp->getAlpha();

        // for every point
        for (int i = 0; i < numExamples; ++i)
        {
            // a reference for clarity and speed
            vector<AlphaReal>& currVotesVector = results[i]->getVotesVector();

            // for every class
            for (int l = 0; l < numClasses; ++l)
                currVotesVector[l] += alpha * currWeakHyp->classify(pData, i, l);
        }

        // if needed output the step-by-step information
        if ( pOutInfo )
        {
            pOutInfo->outputIteration(t);
            pOutInfo->outputCustom(pData, currWeakHyp);

            // Margins and edge require an update of the weights,
            // therefore I keep them out for the moment
            //outInfo.outputMargins(pData, currWeakHyp);
            //outInfo.outputEdge(pData, currWeakHyp);

            pOutInfo->endLine();

        } // if ( pOutInfo )
        // calculate likelihoods from votes

        fill( votesForExamples.begin(), votesForExamples.end(), 0.0 );
        AlphaReal lLambda = 0.0;
        for (int i = 0; i < numExamples; ++i)
        {
            // a reference for clarity and speed
            vector<AlphaReal>& currVotesVector = results[i]->getVotesVector();
            AlphaReal sumExp = 0.0;
            // for every class
            for (int l = 0; l < numClasses; ++l)
            {
                expVotesForExamples[l] =  exp( currVotesVector[l] ) ;
                sumExp += expVotesForExamples[l];
            }

            if ( sumExp > numeric_limits<AlphaReal>::epsilon() )
            {
                for (int l = 0; l < numClasses; ++l)
                {
                    expVotesForExamples[l] /= sumExp;
                }
            }

            Example ex = pData->getExample( results[i]->getIdx() );
            vector<Label> labs = ex.getLabels();
            AlphaReal m = -numeric_limits<AlphaReal>::infinity(); // guard value for log(0)
            for (int l = 0; l < numClasses; ++l)
            {
                if ( labs[l].y > 0 )
                {
                    if ( expVotesForExamples[l] > numeric_limits<AlphaReal>::epsilon() )
                    {
                        AlphaReal logVal = log( expVotesForExamples[l] );

                        if ( logVal != m ) {
                            lLambda += ( ( 1.0/(AlphaReal)numExamples ) * logVal );
                        }
                    }
                }
            }


        }


        outFile << t << "\t" << lLambda ;
        outFile << '\n';

        outFile.flush();
    }

    if (pOutInfo)
        delete pOutInfo;

    // computeResults( pData, weakHypotheses, results, numIterations );
    ///////////////////////////////////////////////////////////////////////////////////


    /*
     for (int i = 0; i < numExamples; ++i)
     {
     // output the name if it exists, otherwise the number
     // of the example
     exampleName = pData->getExampleName(i);
     if ( !exampleName.empty() )
     outFile << exampleName << ',';

     // output the posteriors
     outFile << results[i]->getVotesVector()[0];
     for (int l = 1; l < numClasses; ++l)
     outFile << ',' << results[i]->getVotesVector()[l];
     outFile << '\n';
     }
     */

    if (_verbose > 0)
        cout << "Done!" << endl;

    if (_verbose > 1)
    {
        cout << "\nClass order (You can change it in the header of the data file):" << endl;
        for (int l = 0; l < numClasses; ++l)
            cout << "- " << pData->getClassMap().getNameFromIdx(l) << endl;
    }

    // delete the input data file
    if (pData)
        delete pData;

    vector<ExampleResults*>::iterator it;
    for (it = results.begin(); it != results.end(); ++it)
        delete (*it);
}
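In formula form, the per-iteration quantity written to outFile above is the average log-likelihood of the correct labels under a softmax over the accumulated votes f_l(x_i) (notation taken from the code: n is numExamples, y_{i,l} the label sign):

    lambda_t = (1/n) * Σ_i Σ_{l : y_{i,l} > 0} log( exp(f_l(x_i)) / Σ_k exp(f_k(x_i)) )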
	// -----------------------------------------------------------------------
	// -----------------------------------------------------------------------
	double DataReader::getAdaboostPerfOnCurrentDataset()
	{
		const int numClasses = _pCurrentData->getNumClasses();
		const int numExamples = _pCurrentData->getNumExamples();
		
		int correct = 0;
		int incorrect = 0;
		
        double err = 0.0;
        
        vector<double>& iterationWiseError = _iterationWiseError[_pCurrentData];
        iterationWiseError.resize(_weakHypotheses.size(), 0.);

        vector<ExampleResults*> examplesResults(numExamples);
        for (int i = 0; i < numExamples; ++i)
			examplesResults[i] = new ExampleResults(i, numClasses) ;

        for( int j = 0; j < _weakHypotheses.size(); ++j )
        {
            correct = 0;
            incorrect = 0;

            for( int i = 0; i < numExamples; ++i )
            {
                ExampleResults*& tmpResult = examplesResults[i];
                vector<AlphaReal>& currVotesVector = tmpResult->getVotesVector();
                
                if (_isDataStorageMatrix)
                {
                    for (int l = 0; l < numClasses; ++l)
                        currVotesVector[l] += (*_pCurrentMatrix)[i][j][l];
                }
                else
                {
                    vector<BaseLearner*>::iterator whypIt;
                    for (whypIt = _weakHypotheses[j].begin(); whypIt != _weakHypotheses[j].end(); ++whypIt) {
                        BaseLearner* currWeakHyp = *whypIt;
                        AlphaReal alpha = currWeakHyp->getAlpha();
                        
                        // for every class
                        for (int l = 0; l < numClasses; ++l)
                            currVotesVector[l] += alpha * currWeakHyp->classify(_pCurrentData, i, l);
                    }
                }
                
                vector<Label>::const_iterator lIt;
                const vector<Label>& labels = _pCurrentData->getLabels(i);
                
                // the vote of the winning negative class
                AlphaReal maxNegClass = -numeric_limits<AlphaReal>::max();
                // the vote of the winning positive class
                AlphaReal minPosClass = numeric_limits<AlphaReal>::max();
                
                for ( lIt = labels.begin(); lIt != labels.end(); ++lIt )
                {
                    // get the negative winner class
                    if ( lIt->y < 0 && currVotesVector[lIt->idx] > maxNegClass )
                        maxNegClass = currVotesVector[lIt->idx];
                    
                    // get the positive winner class
                    if ( lIt->y > 0 && currVotesVector[lIt->idx] < minPosClass )
                        minPosClass = currVotesVector[lIt->idx];
                }
                
                // if the vote for the worst positive label is lower than the
                // vote for the highest negative label -> error
                if (minPosClass <= maxNegClass)
                    incorrect++;
                else {
                    correct++;
                }
            }
            
            err = ((double) incorrect / ((double) numExamples)); // * 100.0;
            iterationWiseError[j] = err;
		}

        
        for (int i = 0; i < numExamples; ++i)
			delete examplesResults[i] ;
        

		return err;
	}
Example #11
	// -------------------------------------------------------------------------
	// -------------------------------------------------------------------------
	AlphaReal AdaBoostMHLearner::updateWeights(OutputInfo* pOutInfo, InputData* pData, vector<BaseLearner*>& pWeakHypothesis){
		const int numExamples = pData->getNumExamples();
		const int numClasses = pData->getNumClasses();

		AlphaReal Z = 0; // The normalization factor

		// _hy will contain the margins
		_hy.resize(numExamples);
		for ( int i = 0; i < numExamples; ++i){
			_hy[i].resize(numClasses);
			fill( _hy[i].begin(), _hy[i].end(), 0.0 );
		}

		
		vector<BaseLearner*>::iterator it;
		if (_verbose > 0)
			cout << ": 0%." << flush;

		const int numIters = static_cast<int>(_foundHypotheses.size());
		const int step = numIters < 5 ? 1 : numIters / 5;

		int t = 0;
		// calculate the margins ( f^{t}(x_i) ), _hy will contain
		for( it = pWeakHypothesis.begin(); it != pWeakHypothesis.end(); it++, t++ )
		{

			if (_verbose > 1 && (t + 1) % step == 0)
			{
				float progress = static_cast<float>(t) / static_cast<float>(numIters) * 100.0;                             
				cout << "." << setprecision(2) << progress << "%." << flush;
			}

			// renamed locally to avoid shadowing the pWeakHypothesis parameter
			BaseLearner* pCurrWeakHyp = *it;

			const AlphaReal alpha = pCurrWeakHyp->getAlpha();
			Z = 0; // reset the normalization factor: it must hold only this hypothesis' sum
			AlphaReal hx;
			for (int i = 0; i < numExamples; ++i)
			{
				vector<Label>& labels = pData->getLabels(i);
				vector<Label>::iterator lIt;
				for (lIt = labels.begin(); lIt != labels.end(); ++lIt )
				{
					hx = pCurrWeakHyp->classify(pData, i, lIt->idx );
					_hy[i][lIt->idx] += alpha * hx; // alpha * h_l(x_i)				

					lIt->weight *= exp( -alpha * hx * lIt->y );

					Z += lIt->weight;
					
					
				}
			}
			// renormalize the weights
			for (int i = 0; i < numExamples; ++i)
			{
				vector<Label>& labels = pData->getLabels(i);
				vector<Label>::iterator lIt;

				for (lIt = labels.begin(); lIt != labels.end(); ++lIt )
				{
					lIt->weight /= Z;
				}
			}

			//i++;
			//if ( i % 1000 == 0 ) cout << i <<endl;
		}
		
		// store the computed margins in the output info
		pOutInfo->setTable( pData, _hy );
		return 0;
	}
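The inner loops above implement the standard AdaBoost.MH weight update; in formula form (with y_{i,l} the ±1 label and h_l(x_i) the weak hypothesis vote, as in the code):

    w_{i,l} <- w_{i,l} * exp(-alpha * h_l(x_i) * y_{i,l}) / Z,   where   Z = Σ_{i,l} w_{i,l} * exp(-alpha * h_l(x_i) * y_{i,l})

so that the weights again sum to one after each weak hypothesis.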
Example #12
	// -----------------------------------------------------------------------
	// -----------------------------------------------------------------------
	double DataReader::getAccuracyOnCurrentDataSet()
	{
		double acc=0.0;
		const int numClasses = _pCurrentData->getNumClasses();
		const int numExamples = _pCurrentData->getNumExamples();
		
		int correct=0;
		int incorrect=0;
		
		for( int i = 0; i < numExamples; i++ )
		{
			// use a stack object: the original new'd ExampleResults leaked on every iteration;
			// the loop also starts at 0 so that all numExamples examples are counted
			ExampleResults tmpResult( i, numClasses );
			vector<AlphaReal>& currVotesVector = tmpResult.getVotesVector();
			
			for( int j=0; j<_weakHypotheses.size(); j++ )
			{
				
				BaseLearner* currWeakHyp = _weakHypotheses[j];
				AlphaReal alpha = currWeakHyp->getAlpha();
				
				// for every class
				for (int l = 0; l < numClasses; ++l)
					currVotesVector[l] += alpha * currWeakHyp->classify(_pCurrentData, i, l);
				
			}
			
			
			vector<Label>::const_iterator lIt;
			
			const vector<Label>& labels = _pCurrentData->getLabels(i);
			
			
			// the vote of the winning negative class
			AlphaReal maxNegClass = -numeric_limits<AlphaReal>::max();
			// the vote of the winning positive class
			AlphaReal minPosClass = numeric_limits<AlphaReal>::max();
			
			
			for ( lIt = labels.begin(); lIt != labels.end(); ++lIt )
			{
				// get the negative winner class
				if ( lIt->y < 0 && currVotesVector[lIt->idx] > maxNegClass )
					maxNegClass = currVotesVector[lIt->idx];
				
				// get the positive winner class
				if ( lIt->y > 0 && currVotesVector[lIt->idx] < minPosClass )
					minPosClass = currVotesVector[lIt->idx];
			}
			
			// if the vote for the worst positive label is lower than the
			// vote for the highest negative label -> error
			if (minPosClass <= maxNegClass)
				incorrect++;
			else {
				correct++;
			}
			
		}
		
	    acc = ((double) correct / ((double) numExamples)) * 100.0;
		
		return acc;
	}