Example #1
/*************************************
 * Function: update_feature_weight
 * -------------------------------
 * Given TrainingData and a WeakClassifier that has been weighted in
 * get_best_classifier(), we recalculate the weights of all the features
 *
 * td: training data (set of features)
 * wc: (weighted) weak classifier
 *
 * returns true if successful, false otherwise
 */
bool AdaBooster::update_feature_weight(TrainingData &td, WeakClassifier &wc){
	// check that WeakClassifier has actually been weighted
	if (wc.weight() < 0){
		printf("Error in update_feature_weight: WeakClassifier has invalid weight\n");
		return false;
	}

	// traverse features in feature set and adjust their weights
	for (unsigned int i=0; i<num_features; i++){
		FeatureVector* fv = td.feature(i);
		// either 1 or -1 (used in weight below)
		int is_correct = is_classifier_correct(wc, *fv) ? 1 : -1;

		// calculate and update weight: w_i *= e^(-alpha * y_i * h(x_i))
		float weight = exp( (double) -1 * wc.weight() * is_correct );
		td.setWeight(i, td.weight(i)*weight);
	}

	// calculate normalization factor
	float norm = 0;
	for (unsigned int i=0; i<num_features; i++)
		norm += td.weight(i);

	// normalize feature weights
	for (unsigned int i=0; i<num_features; i++)
		td.setWeight(i, td.weight(i)/norm);
	
	return true; // successful
}
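For reference, this is the standard AdaBoost re-weighting step. Writing alpha for wc.weight(), and letting y_i * h(x_i) be +1 when the weak classifier is correct on feature i and -1 otherwise, the first loop computes

	w_i <- w_i * exp( -alpha * y_i * h(x_i) )

and the last two loops divide every weight by Z = sum_i w_i so the weights again form a distribution: misclassified features gain weight, correctly classified ones lose it.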
Example #2
void CascadeWriter::writeStages()
{
    xml.writeStartElement("stages");

    for (int i = 0; i < m_cascadeObj.stageNum(); i++)
    {
        QString stageStr = QString(" stage %1 ").arg(i);
        xml.writeComment(stageStr);

        Stage stage = m_cascadeObj.getStage(i);
        xml.writeStartElement("_");
        xml.writeTextElement("maxWeakCount", QString::number(stage.maxWeakCount()));
        xml.writeTextElement("stageThreshold", QString::number(stage.stageThreshold()));

        // write weak classifiers
        xml.writeStartElement("weakClassifiers");
        for (int j = 0; j < stage.maxWeakCount(); j++)
        {
            WeakClassifier wc = stage.getWeakClassifier(j);
            xml.writeStartElement("_");
            xml.writeTextElement("internalNodes", wc.internalNodes());
            xml.writeTextElement("leafValues", wc.leafValues());
            xml.writeEndElement();
        }
        xml.writeEndElement();

        xml.writeEndElement();
    }
    xml.writeEndElement();
}
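For orientation, the element nesting above mirrors the OpenCV cascade XML layout. Assuming QXmlStreamWriter auto-formatting, one stage would come out roughly as follows (all numeric values here are placeholders, not real output):

<stages>
  <!-- stage 0 -->
  <_>
    <maxWeakCount>3</maxWeakCount>
    <stageThreshold>-1.2</stageThreshold>
    <weakClassifiers>
      <_>
        <internalNodes>0 -1 13569 0.005</internalNodes>
        <leafValues>0.03 0.8</leafValues>
      </_>
      ...
    </weakClassifiers>
  </_>
</stages>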
Example #3
/**************************************
 * Function: is_classifier_correct
 * -------------------------------
 * returns true if weak classifier (wc) correctly identified the 
 * feature vector (fv), false otherwise.
 */
bool AdaBooster::is_classifier_correct(WeakClassifier &wc, FeatureVector &fv){
	
	// check if threshold is greater than (or equal to) feature
	bool guess = ( wc.threshold() >= fv.at(wc.dimension()) );

	// if classifier is flipped, negate guess
	guess = wc.isFlipped() ? !guess : guess;

	// find actual value of point
	bool real = ( fv.val() == POS );

	// return if guess and real agree
	return ( real == guess );

}
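In decision-stump terms: with d = wc.dimension() and theta = wc.threshold(), the stump guesses positive when theta >= f_d(x), negated when the stump is flipped, and the function simply reports whether that guess agrees with the ground-truth label stored in fv.val().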
Example #4
bool AdaBoost::addWeakClassifier(const WeakClassifier &weakClassifier){
    
    WeakClassifier *weakClassifierPtr = weakClassifier.createNewInstance();
    weakClassifiers.push_back( weakClassifierPtr );
    
    return true;
}
Example #5
bool AdaBoost::setWeakClassifier(const WeakClassifier &weakClassifier){
    
    //Clear any previous weak classifiers
    clearWeakClassifiers();
    
    WeakClassifier *weakClassifierPtr = weakClassifier.createNewInstance();
    
    weakClassifiers.push_back( weakClassifierPtr );
    
    return true;
}
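A minimal calling sketch for the two methods above. DecisionStump here stands in for a concrete subclass of WeakClassifier; any weak learner that overrides createNewInstance() would do:

AdaBoost adaBoost;
adaBoost.setWeakClassifier( DecisionStump() );  //clears the family, then adds one weak learner
adaBoost.addWeakClassifier( DecisionStump() );  //appends a second weak learner to the family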
Example #6
float StrongClassifier::evaluate(const std::vector<float> &features) const {
  float decision = 0;
  for (size_t i = 0; i < m_weakClassifiers.size(); i++) {
    WeakClassifier weak = m_weakClassifiers.at(i);

    int sign;
    if ( (weak.threshold() > features[weak.dimension()] && !weak.isFlipped()) ||
            (weak.threshold() < features[weak.dimension()] && weak.isFlipped()) )
        sign = 1;
    else
        sign = -1;
    decision += weak.weight() * sign;
  }
  return decision;
}
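This computes the usual strong-classifier score H(x) = sum_t alpha_t * h_t(x) with h_t(x) in {+1, -1}; callers would typically compare the returned decision against 0 (or a tuned operating threshold) to get a binary label. Note the strict comparisons: a feature lying exactly on a stump's threshold contributes -1 for either parity.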
Example #7
void AdaBoostClassifier::AddOneSimpleClassifier(bool* used)
{
	int i;
	REAL alpha,beta;
	WeakClassifier minsc;
	REAL minerror;
	int minindex;

	minerror = REAL(1.01); minindex  = -1;
	for(i=0;i<gTotalCount;i++) gLabels[i] = gTrainSet[i].m_iLabel;
	NormalizeWeight();
	TRACE("%f %f %f %f\n",*min_element(gWeights,gWeights+gFaceCount)*10000,*max_element(gWeights,gWeights+gFaceCount)*10000,
		     *min_element(gWeights+gFaceCount,gWeights+gTotalCount)*10000,*max_element(gWeights+gFaceCount,gWeights+gTotalCount)*10000);
	for(i=0;i<gTotalFeatures;i++)
	{
		if(used[i]) continue;
		SingleFeatureClassifier(gLabels,gClassifiers[i],gTable[i]);
		if(gClassifiers[i].m_rError<minerror)
		{
			minerror = gClassifiers[i].m_rError;
			minsc = gClassifiers[i];
			minindex = i;
		}
	}

	if(minindex < 0) return; // no unused feature found; nothing to add
	used[minindex] = true;
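	// Viola-Jones re-weighting: beta = e/(1-e) (< 1 when e < 0.5), so correctly classified samples lose weight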
	beta = minsc.m_rError / (REAL(1.0)-minsc.m_rError);
	for(i=0;i<gTotalCount;i++)
		if(minsc.Apply(gTrainSet[i]) == gTrainSet[i].m_iLabel)
			gWeights[i] *= beta;
	if(beta<REAL(1e-8)) beta = REAL(1e-8);
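	// clamping beta keeps alpha = -log(beta) finite when the stump is (near) perfect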
	alpha = -log(beta);

	m_WeakClassifiers[m_iCount] = minsc;
	m_rAlphas[m_iCount] = alpha;
	m_rThreshold += (REAL(0.5)*alpha);
	m_iCount++;
}
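This matches the discrete AdaBoost variant used by Viola and Jones: with weighted error e of the selected stump, beta = e / (1 - e) and alpha = -log(beta) = log((1 - e) / e). Multiplying the weights of correctly classified samples by beta (and renormalizing at the start of the next round) shifts mass onto the misclassified samples, and the detection threshold accumulates alpha / 2 per round.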
void SingleFeatureClassifier(const int* const labels,WeakClassifier& sc,const int* const indexes)
{
	int i;
	REAL min1; REAL min2;
	REAL e1,e2;
	int pos1,pos2;

	// compute errors1, suppose parity is 1, that is f(x)<thresh ==> h(x) = 1
	// compute errors2, suppose parity is 0, that is f(x)>thresh ==> h(x) = 1
	e1 = 0.0; for(i=0;i<gTotalCount;i++) if(labels[i]!=0) e1+=gWeights[i];
	e2 = REAL(1.0)-e1;
	min1 = e1; pos1 = 0;
	min2 = e2; pos2 = 0;
	for(i=0;i<gTotalCount;i++)
	{
		if(labels[indexes[i]]!=0) e1-=gWeights[indexes[i]]; else e1+=gWeights[indexes[i]];
		if(e1<min1) { min1=e1; pos1=i; }
		e2 = REAL(1.0) - e1;
		if(e2<min2) { min2=e2; pos2=i; }
	}

	pos1++; if(pos1==gTotalCount) pos1--;
	pos2++; if(pos2==gTotalCount) pos2--;
	if(min1<min2)
	{
		sc.m_iParity = 1;
		sc.m_rError = min1;
		sc.m_rThreshold = sc.GetOneFeature(gTrainSet[indexes[pos1]]);
	}
	else
	{
		sc.m_iParity = 0;
		sc.m_rError = min2;
		sc.m_rThreshold = sc.GetOneFeature(gTrainSet[indexes[pos2]]);
	}
}
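The single pass works because indexes[] is assumed to list the samples in ascending order of this feature's value (precomputed once in gTable by the caller). e1 starts as the error of a threshold below every sample, i.e. all positives misclassified under parity 1; each iteration slides the candidate threshold past one more sample and updates the weighted error in O(1), so the best threshold/parity pair is found in O(n) rather than O(n^2).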
Example #9
bool AdaBoost::train_(ClassificationData &trainingData){
    
    //Clear any previous model
    clear();
    
    if( trainingData.getNumSamples() <= 1 ){
        errorLog << "train_(ClassificationData &trainingData) - There are not enough training samples to train a model! Number of samples: " << trainingData.getNumSamples()  << endl;
        return false;
    }
    
    numInputDimensions = trainingData.getNumDimensions();
    numClasses = trainingData.getNumClasses();
    const UINT M = trainingData.getNumSamples();
    const UINT POSITIVE_LABEL = WEAK_CLASSIFIER_POSITIVE_CLASS_LABEL;
    const UINT NEGATIVE_LABEL = WEAK_CLASSIFIER_NEGATIVE_CLASS_LABEL;
    double alpha = 0;
    const double beta = 0.001;
    double epsilon = 0;
    TrainingResult trainingResult;
    
    const UINT K = (UINT)weakClassifiers.size();
    if( K == 0 ){
        errorLog << "train_(ClassificationData &trainingData) - No weakClassifiers have been set. You need to set at least one weak classifier first." << endl;
        return false;
    }

    classLabels.resize(numClasses);
    models.resize(numClasses);
    ranges = trainingData.getRanges();

    //Scale the training data if needed
    if( useScaling ){
        trainingData.scale(ranges,0,1);
    }
    
    //Create the weights vector
    VectorDouble weights(M);
    
    //Create the error matrix
    MatrixDouble errorMatrix(K,M);
    
    for(UINT classIter=0; classIter<numClasses; classIter++){
        
        //Get the class label for the current class
        classLabels[classIter] = trainingData.getClassLabels()[classIter];
        
        //Set the class label of the current model
        models[ classIter ].setClassLabel( classLabels[classIter] );
        
        //Setup the labels for this class, POSITIVE_LABEL == 1, NEGATIVE_LABEL == 2
        ClassificationData classData;
        classData.setNumDimensions(trainingData.getNumDimensions());
        for(UINT i=0; i<M; i++){
            UINT label = trainingData[i].getClassLabel()==classLabels[classIter] ? POSITIVE_LABEL : NEGATIVE_LABEL;
            VectorDouble trainingSample = trainingData[i].getSample();
            classData.addSample(label,trainingSample);
        }
        
        //Setup the initial training sample weights
        std::fill(weights.begin(),weights.end(),1.0/M);
        
        //Run the boosting loop
        bool keepBoosting = true;
        UINT t = 0;
        
        while( keepBoosting ){
            
            //Pick the classifier from the family of classifiers that minimizes the total error
            UINT bestClassifierIndex = 0;
            double minError = numeric_limits<double>::max();
            for(UINT k=0; k<K; k++){
                //Get the k'th possible classifier
                WeakClassifier *weakLearner = weakClassifiers[k];
                
                //Train the current classifier
                if( !weakLearner->train(classData,weights) ){
                    errorLog << "Failed to train weakLearner!" << endl;
                    return false;
                }
                
                //Compute the weighted error for this classifier
                double e = 0;
                double positiveLabel = weakLearner->getPositiveClassLabel();
                double numCorrect = 0;
                double numIncorrect = 0;
                for(UINT i=0; i<M; i++){
                    //Only penalize errors
                    double prediction = weakLearner->predict( classData[i].getSample() );
                    
                    if( (prediction == positiveLabel && classData[i].getClassLabel() != POSITIVE_LABEL) ||        //False positive
                        (prediction != positiveLabel && classData[i].getClassLabel() == POSITIVE_LABEL) ){       //False negative
                        e += weights[i]; //Increase the error proportional to the weight of the example
                        errorMatrix[k][i] = 1; //Flag that there was an error
                        numIncorrect++;
                    }else{
                        errorMatrix[k][i] = 0; //Flag that there was no error
                        numCorrect++;
                    }
                }
                
                trainingLog << "PositiveClass: " << classLabels[classIter] << " Boosting Iter: " << t << " Classifier: " << k << " WeightedError: " << e << " NumCorrect: " << numCorrect/M << " NumIncorrect: " <<numIncorrect/M << endl;
                
                if( e < minError ){
                    minError = e;
                    bestClassifierIndex = k;
                }
                
            }
  
            epsilon = minError;
            
            //Set alpha using the AdaBoost M1 weight: a classifier with a small error (epsilon close to 0) receives a large weight in the final classifier
            alpha = 0.5 * log( (1.0-epsilon)/epsilon );
            
            trainingLog << "PositiveClass: " << classLabels[classIter] << " Boosting Iter: " << t << " Best Classifier Index: " << bestClassifierIndex << " MinError: " << minError << " Alpha: " << alpha << endl;
            
            if( isinf(alpha) ){ keepBoosting = false; trainingLog << "Alpha is INF. Stopping boosting for current class" << endl; }
            if( 0.5 - epsilon <= beta ){ keepBoosting = false; trainingLog << "(0.5 - Epsilon) <= Beta. Stopping boosting for current class" << endl; }
            if( ++t >= numBoostingIterations ) keepBoosting = false;

            trainingResult.setClassificationResult(t, minError, this);
            trainingResults.push_back(trainingResult);
            trainingResultsObserverManager.notifyObservers( trainingResult );
            
            if( keepBoosting ){
                
                //Add the best weak classifier to the committee
                models[ classIter ].addClassifierToCommitee( weakClassifiers[bestClassifierIndex], alpha );
                
                //Update the weights for the next boosting iteration
                double reWeight = (1.0 - epsilon) / epsilon;
                double oldSum = 0;
                double newSum = 0;
                for(UINT i=0; i<M; i++){
                    oldSum += weights[i];
                    //Only update the weights that resulted in an incorrect prediction
                    if( errorMatrix[bestClassifierIndex][i] == 1 ) weights[i] *= reWeight;
                    newSum += weights[i];
                }
                
                //Normalize all the weights
                //This has the effect of increasing the weights of the samples that were incorrectly labelled
                //while decreasing the weights of the samples that were correctly classified
                reWeight = oldSum/newSum;
                for(UINT i=0; i<M; i++){
                    weights[i] *= reWeight;
                }
                
            }else{
                trainingLog << "Stopping boosting training at iteration : " << t-1 << " with an error of " << epsilon << endl;
                if( t-1 == 0 ){
                    //Add the best weak classifier to the committee (we have to add it as this is the first iteration)
                    if( isinf(alpha) ){ alpha = 1; } //If alpha is infinite then the first classifier got everything correct
                    models[ classIter ].addClassifierToCommitee( weakClassifiers[bestClassifierIndex], alpha );
                }
            }
            
        }
    }
    
    //Normalize the weights
    for(UINT k=0; k<numClasses; k++){
        models[k].normalizeWeights();
    }
    
    //Flag that the model has been trained
    trained = true;
    
    //Setup the data for prediction
    predictedClassLabel = 0;
    maxLikelihood = 0;
    classLikelihoods.resize(numClasses);
    classDistances.resize(numClasses);
    
    return true;
}
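A minimal end-to-end sketch against this API. DecisionStump, loadDatasetFromFile and the GRT.h umbrella header are assumptions about the surrounding library and may differ between versions:

#include <cstdlib>
#include <iostream>
#include "GRT.h"
using namespace GRT;

int main(){
    //Register the family of weak learners to boost
    AdaBoost adaBoost;
    adaBoost.setWeakClassifier( DecisionStump() );

    //Load a labelled dataset and train the boosted model
    ClassificationData trainingData;
    if( !trainingData.loadDatasetFromFile( "train.grt" ) ) return EXIT_FAILURE;
    if( !adaBoost.train( trainingData ) ) return EXIT_FAILURE;

    //Classify one sample
    VectorDouble sample( trainingData.getNumDimensions(), 0.0 );
    if( adaBoost.predict( sample ) ){
        std::cout << "Predicted class: " << adaBoost.getPredictedClassLabel() << std::endl;
    }
    return EXIT_SUCCESS;
}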
Example #10
void WriteSimpleClassifiers(void)
{
	int x1,x2,x3,x4,y1,y2,y3,y4;
	WeakClassifier sc;
	int index;
	ofstream f;

	int pickup=9;
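	// pickup subsamples the pool: only every 10th enumerated feature (index % 10 == pickup) is written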

	f.open("classifiers.txt");
	index = 0;

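	// type 0: two side-by-side rectangles; x2 is the horizontal midpoint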
	for(x1=0;x1<gSx;x1+=1)
		for(x3=x1+2;x3<=gSx;x3+=2)
			for(y1=0;y1<gSy;y1+=1)
				for(y3=y1+1;y3<=gSy;y3+=1)
				{
					x2 = (x1+x3)/2;
					y2 = y4 = x4 = -1;
					sc.m_iType = 0; sc.m_rError = 0.0;
					sc.x1 = x1; sc.x2 = x2; sc.x3 = x3; sc.x4 = x4;
					sc.y1 = y1; sc.y2 = y2; sc.y3 = y3; sc.y4 = y4;
					sc.m_iParity = 0;
					sc.m_rThreshold = 0.0;
					if(index%10==pickup) sc.WriteToFile(f);
					index++;
				}

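	// type 1: two vertically stacked rectangles; y2 is the vertical midpoint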
	for(x1=0;x1<gSx;x1+=1)
		for(x3=x1+1;x3<=gSx;x3+=1)
			for(y1=0;y1<gSy;y1+=1)
				for(y3=y1+2;y3<=gSy;y3+=2)
				{
					y2 = (y1+y3)/2;
					x2 = x4 = y4 = -1;
					sc.m_iType = 1; sc.m_rError = 0.0;
					sc.x1 = x1; sc.x2 = x2; sc.x3 = x3; sc.x4 = x4;
					sc.y1 = y1; sc.y2 = y2; sc.y3 = y3; sc.y4 = y4;
					sc.m_iParity = 0;
					sc.m_rThreshold = 0.0;
					if(index%10==pickup) sc.WriteToFile(f);
					index++;
				}

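	// type 2: three side-by-side rectangles; x2 and x3 cut [x1, x4] into thirds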
	for(x1=0;x1<gSx;x1++)
		for(x4=x1+3;x4<=gSx;x4+=3)
			for(y1=0;y1<gSy;y1+=1)
				for(y3=y1+1;y3<=gSy;y3+=1)
				{
					x2 = x1 + (x4-x1)/3;
					x3 = x2 + (x4-x1)/3;
					y2 = y4 = -1;
					sc.m_iType = 2; sc.m_rError = 0.0;
					sc.x1 = x1; sc.x2 = x2; sc.x3 = x3; sc.x4 = x4;
					sc.y1 = y1; sc.y2 = y2; sc.y3 = y3; sc.y4 = y4;
					sc.m_iParity = 0;
					sc.m_rThreshold = 0.0;
					if(index%10==pickup) sc.WriteToFile(f);
					index++;
				}

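	// type 3: three vertically stacked rectangles; y2 and y3 cut [y1, y4] into thirds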
	for(x1=0;x1<gSx;x1++)
		for(x3=x1+1;x3<=gSx;x3+=1)
			for(y1=0;y1<gSy;y1++)
				for(y4=y1+3;y4<=gSy;y4+=3)
				{
					y2 = y1 + (y4-y1)/3;
					y3 = y2 + (y4-y1)/3;
					x2 = x4 = -1;
					sc.m_iType = 3; sc.m_rError = 0.0;
					sc.x1 = x1; sc.x2 = x2; sc.x3 = x3; sc.x4 = x4;
					sc.y1 = y1; sc.y2 = y2; sc.y3 = y3; sc.y4 = y4;
					sc.m_iParity = 0;
					sc.m_rThreshold = 0.0;
					if(index%10==pickup) sc.WriteToFile(f);
					index++;
				}

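	// type 4: four rectangles in a 2x2 checkerboard; x2 and y2 are the midpoints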
	for(x1=0;x1<gSx;x1+=1)
		for(x3=x1+2;x3<=gSx;x3+=2)
			for(y1=0;y1<gSy;y1+=1)
				for(y3=y1+2;y3<=gSy;y3+=2)
				{
					x2 = (x1+x3)/2;
					y2 = (y1+y3)/2;
					x4 = y4 = -1;
					sc.m_iType = 4; sc.m_rError = 0.0;
					sc.x1 = x1; sc.x2 = x2; sc.x3 = x3; sc.x4 = x4;
					sc.y1 = y1; sc.y2 = y2; sc.y3 = y3; sc.y4 = y4;
					sc.m_iParity = 0;
					sc.m_rThreshold = 0.0;
					if(index%10==pickup) sc.WriteToFile(f);
					index++;
				}

	f.close();
}
Example #11
/**********************************************
 * Function: classifierGuess
 * -------------------------
 * Returns true if the classifier decided a feature was POS, false otherwise.
 */
bool ImageTester::classifierGuess(WeakClassifier &wc, float value){
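  // note: equality (threshold == value) yields false under either parity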
  return ( (wc.threshold() > value && !wc.isFlipped()) ||
      (wc.threshold() < value && wc.isFlipped()) );
}
Example #12
WeakClassifier::WeakClassifier(const WeakClassifier &other)
{
    m_internalNodes = other.internalNodes();
    m_leafValues = other.leafValues();
}
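If m_internalNodes and m_leafValues are the only data members, the compiler-generated copy constructor would behave identically; writing it out by hand only becomes necessary once the class manages resources or gains additional state.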