/*************************************
 * Function: update_feature_weight
 * -------------------------------
 * Given TrainingData and a WeakClassifier that has been weighted in
 * get_best_classifier(), we recalculate the weights of all the features
 *
 * td: training data (set of features)
 * wc: (weighted) weak classifier
 *
 * returns true if successful, false otherwise
 */
bool AdaBooster::update_feature_weight(TrainingData &td, WeakClassifier &wc){
	// check that WeakClassifier has actually been weighted
	if (wc.weight() < 0){
		printf("Error in update_feature_weight: WeakClassifier has invalid weight\n");
		return false;
	}

	// traverse features in feature set and adjust their weights
	for (unsigned int i=0; i<num_features; i++){
		FeatureVector* fv = td.feature(i);

		// either 1 or -1 (used in weight below)
		int is_correct = is_classifier_correct(wc, *fv) ? 1 : -1;

		// calculate and update weight
		// note M_E := 2.71828
		float weight = pow(M_E, (double) -1 * wc.weight() * is_correct);
		td.setWeight(i, td.weight(i)*weight);
	}

	// calculate normalization factor
	float norm = 0;
	for (unsigned int i=0; i<num_features; i++)
		norm += td.weight(i);

	// normalize feature weights
	for (unsigned int i=0; i<num_features; i++)
		td.setWeight(i, td.weight(i)/norm);

	return true; // successful
}
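The loop above is the standard AdaBoost reweighting rule: each example's weight is multiplied by e^(-alpha * is_correct), so misclassified examples gain weight and correctly classified ones lose weight, and the weights are then renormalized to sum to 1. The snippet below is a minimal standalone sketch of the same arithmetic on a toy weight vector; the names (alpha, correct, w) are illustrative only and are not part of the AdaBooster interface.

// Standalone illustration of the reweighting rule above; not part of AdaBooster.
#include <cmath>
#include <cstdio>
#include <vector>

int main(){
	// three training examples, starting from uniform weights
	std::vector<double> w = {1.0/3, 1.0/3, 1.0/3};
	std::vector<int> correct = {1, 1, -1};	// +1 = classified correctly, -1 = misclassified
	double alpha = 0.5;						// weight of the weak classifier

	// scale each weight by e^(-alpha * correct), accumulating the normalization factor
	double norm = 0;
	for (size_t i = 0; i < w.size(); i++){
		w[i] *= std::exp(-alpha * correct[i]);
		norm += w[i];
	}

	// normalize so the weights again sum to 1; the misclassified example ends up heaviest
	for (size_t i = 0; i < w.size(); i++){
		w[i] /= norm;
		printf("w[%zu] = %f\n", i, w[i]);
	}
	return 0;
}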
/**************************************
 * Function: init_feature_weight
 * -----------------------------
 * Initializes weights of features to be 1/(total num of features)
 *
 * td: training data -- set of features
 */
void AdaBooster::init_feature_weight(TrainingData &td){
	// set all points to same weight = 1/(# of features)
	for (unsigned int i=0; i<num_features; i++)
		td.setWeight(i, 1./num_features);
}
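For context, here is a hedged sketch of how these two routines could be called from a boosting driver. The header comment of update_feature_weight() refers to get_best_classifier(), but its exact signature, the train() wrapper, and the number-of-rounds parameter are assumptions made for illustration.

// Hypothetical driver loop, for illustration only -- the real AdaBooster
// interface may differ. Assumes get_best_classifier() returns the weighted
// weak classifier for the current round.
void AdaBooster::train(TrainingData &td, unsigned int num_rounds){
	// start every feature at weight 1/num_features
	init_feature_weight(td);

	for (unsigned int t = 0; t < num_rounds; t++){
		// pick (and weight) the weak classifier that best fits the
		// current weight distribution -- assumed signature
		WeakClassifier wc = get_best_classifier(td);

		// reweight the training set before the next round
		if (!update_feature_weight(td, wc))
			break;
	}
}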