/*************************************
 * Function: update_feature_weight
 * -------------------------------
 * Given TrainingData and a WeakClassifier that has been weighted in
 * get_best_classifier(), we recalculate the weights of all the features.
 *
 * td: training data (set of features)
 * wc: (weighted) weak classifier
 *
 * Returns true if successful, false otherwise.
 */
bool AdaBooster::update_feature_weight(TrainingData &td, WeakClassifier &wc){
	// check that WeakClassifier has actually been weighted
	if (wc.weight() < 0){
		printf("Error in update_feature_weight: WeakClassifier has invalid weight\n");
		return false;
	}

	// traverse features in feature set and adjust their weights
	for (unsigned int i=0; i<num_features; i++){
		FeatureVector* fv = td.feature(i);

		// either 1 or -1 (used in weight below)
		int is_correct = is_classifier_correct(wc, *fv) ? 1 : -1;

		// calculate and update weight
		// note M_E := 2.71828
		float weight = pow(M_E, (double) -1 * wc.weight() * is_correct);
		td.setWeight(i, td.weight(i)*weight);
	}

	// calculate normalization factor
	float norm = 0;
	for (unsigned int i=0; i<num_features; i++)
		norm += td.weight(i);

	// normalize feature weights
	for (unsigned int i=0; i<num_features; i++)
		td.setWeight(i, td.weight(i)/norm);

	return true; // successful
}
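For reference, the loop above is the standard AdaBoost reweighting rule: each sample weight is multiplied by e^(-alpha * y_i * h(x_i)), so misclassified samples gain weight and correctly classified ones lose it, and the weights are then renormalized to sum to 1. Below is a minimal, self-contained sketch of that same rule on plain arrays; the function name reweight and its parameters are illustrative and not part of the AdaBooster API.

#include <cmath>
#include <vector>

// Hypothetical standalone version of the AdaBoost weight update.
// labels and predictions hold +1/-1 values; alpha is the weight that
// boosting assigned to the current weak classifier.
void reweight(std::vector<double> &weights,
              const std::vector<int> &labels,
              const std::vector<int> &predictions,
              double alpha)
{
	double norm = 0.0;
	for (size_t i = 0; i < weights.size(); ++i) {
		// std::exp(x) equals pow(M_E, x); misclassified samples
		// (labels[i] != predictions[i]) get scaled up, correct ones down
		weights[i] *= std::exp(-alpha * labels[i] * predictions[i]);
		norm += weights[i];
	}
	// renormalize so the weights form a distribution
	for (size_t i = 0; i < weights.size(); ++i)
		weights[i] /= norm;
}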
/*************************************
 * Function: evaluate
 * ------------------
 * Evaluates the strong classifier on a feature vector by summing the
 * weighted +1/-1 votes of all weak classifiers.
 *
 * features: feature vector to classify
 *
 * Returns the weighted vote total; its sign gives the predicted class.
 */
float StrongClassifier::evaluate(const std::vector<float> &features) const {
	float decision = 0;
	for (size_t i = 0; i < m_weakClassifiers.size(); i++) {
		const WeakClassifier &weak = m_weakClassifiers.at(i);

		// weak classifier votes +1 if the feature lies on its positive
		// side of the threshold (taking the flipped flag into account)
		int sign;
		if ( (weak.threshold() > features[weak.dimension()] && !weak.isFlipped())
				|| (weak.threshold() < features[weak.dimension()] && weak.isFlipped()) )
			sign = 1;
		else
			sign = -1;

		decision += weak.weight() * sign;
	}
	return decision;
}
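The same weighted-vote idea can be written without the surrounding classes. The sketch below assumes each weak classifier is a decision stump on a single feature dimension; the Stump struct and strong_decision() are illustrative names, not part of this codebase.

#include <vector>

// Hypothetical stand-in for WeakClassifier: a one-dimensional decision stump.
struct Stump {
	int   dimension;  // which feature the stump looks at
	float threshold;  // split point on that feature
	bool  flipped;    // whether the comparison direction is inverted
	float alpha;      // weight assigned to this stump by boosting
};

// Sum the weighted +1/-1 votes of all stumps, mirroring evaluate() above.
float strong_decision(const std::vector<Stump> &stumps,
                      const std::vector<float> &features)
{
	float decision = 0.0f;
	for (const Stump &s : stumps) {
		bool below = features[s.dimension] < s.threshold;
		// the stump votes +1 when the sample falls on its positive side
		int vote = (below != s.flipped) ? 1 : -1;
		decision += s.alpha * vote;
	}
	return decision;  // classify by the sign of the returned value
}

A caller would typically threshold the result at zero: a positive return value means the weighted vote favors the positive class, a negative one the negative class.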