void FilterBoostLearner::run(const nor_utils::Args& args)
{
    // load the arguments
    this->getArgs(args);

    time_t startTime, currentTime;
    time(&startTime);

    // get the registered weak learner (type from name)
    BaseLearner* pWeakHypothesisSource =
        BaseLearner::RegisteredLearners().getLearner(_baseLearnerName);

    // initialize the learning options; normally this is done in the strong loop,
    // but we also do it here for Product learners, so that the input data can be created
    pWeakHypothesisSource->initLearningOptions(args);

    BaseLearner* pConstantWeakHypothesisSource =
        BaseLearner::RegisteredLearners().getLearner("ConstantLearner");

    // get the training input data, and load it
    InputData* pTrainingData = pWeakHypothesisSource->createInputData();
    pTrainingData->initOptions(args);
    pTrainingData->load(_trainFileName, IT_TRAIN, _verbose);

    const int numClasses = pTrainingData->getNumClasses();
    const int numExamples = pTrainingData->getNumExamples();

    // initialize the margins variable
    _margins.resize( numExamples );
    for ( int i = 0; i < numExamples; i++ )
    {
        _margins[i].resize( numClasses );
        fill( _margins[i].begin(), _margins[i].end(), 0.0 );
    }

    // get the testing input data, and load it
    InputData* pTestData = NULL;
    if ( !_testFileName.empty() )
    {
        pTestData = pWeakHypothesisSource->createInputData();
        pTestData->initOptions(args);
        pTestData->load(_testFileName, IT_TEST, _verbose);
    }

    // the output information object
    OutputInfo* pOutInfo = NULL;

    if ( !_outputInfoFile.empty() )
    {
        // baseline: the constant classifier goes into the 0th iteration
        BaseLearner* pConstantWeakHypothesis = pConstantWeakHypothesisSource->create();
        pConstantWeakHypothesis->initLearningOptions(args);
        pConstantWeakHypothesis->setTrainingData(pTrainingData);
        AlphaReal constantEnergy = pConstantWeakHypothesis->run();

        pOutInfo = new OutputInfo(args);
        pOutInfo->initialize(pTrainingData);

        updateMargins( pTrainingData, pConstantWeakHypothesis );

        if (pTestData)
            pOutInfo->initialize(pTestData);
        pOutInfo->outputHeader( pTrainingData->getClassMap() );

        pOutInfo->outputIteration(-1);
        pOutInfo->outputCustom(pTrainingData, pConstantWeakHypothesis);

        if (pTestData)
        {
            pOutInfo->separator();
            pOutInfo->outputCustom(pTestData, pConstantWeakHypothesis);
        }

        pOutInfo->outputCurrentTime();
        pOutInfo->endLine();

        pOutInfo->initialize(pTrainingData);
        if (pTestData)
            pOutInfo->initialize(pTestData);
    }

    // reload the previously found weak learners if -resume is set;
    // otherwise this simply returns 0
    int startingIteration = resumeWeakLearners(pTrainingData);

    Serialization ss(_shypFileName, _isShypCompressed);
    ss.writeHeader(_baseLearnerName); // this must go after resumeWeakLearners has been called

    // perform the resuming if necessary; if not, this simply returns
    resumeProcess(ss, pTrainingData, pTestData, pOutInfo);

    if (_verbose == 1)
        cout << "Learning in progress..." << endl;

    ///////////////////////////////////////////////////////////////////////
    // Starting the AdaBoost main loop
    ///////////////////////////////////////////////////////////////////////
    for (int t = startingIteration; t < _numIterations; ++t)
    {
        if (_verbose > 1)
            cout << "------- WORKING ON ITERATION " << (t+1) << " -------" << endl;

        // create the weak learner
        BaseLearner* pWeakHypothesis = pWeakHypothesisSource->create();
        pWeakHypothesis->initLearningOptions(args);
        //pTrainingData->clearIndexSet();
        pWeakHypothesis->setTrainingData(pTrainingData);

        AlphaReal edge, energy = 0.0;

        // create the constant learner
        BaseLearner* pConstantWeakHypothesis = pConstantWeakHypothesisSource->create();
        pConstantWeakHypothesis->initLearningOptions(args);
        pConstantWeakHypothesis->setTrainingData(pTrainingData);
        AlphaReal constantEdge = -numeric_limits<AlphaReal>::max();

        int currentNumberOfUsedData = static_cast<int>(_Cn * log(t + 3.0));

        if ( _onlineWeakLearning )
        {
            // check whether the weak learner is a StochasticLearner
            try
            {
                StochasticLearner* pStochasticLearner =
                    dynamic_cast<StochasticLearner*>(pWeakHypothesis);
                StochasticLearner* pStochasticConstantWeakHypothesis =
                    dynamic_cast<StochasticLearner*>(pConstantWeakHypothesis);

                pStochasticLearner->initLearning();
                pStochasticConstantWeakHypothesis->initLearning();

                if (_verbose > 1)
                    cout << "Number of random instances: \t" << currentNumberOfUsedData << endl;

                // set the weights
                setWeightToMargins(pTrainingData);

                // learning
                for (int i = 0; i < currentNumberOfUsedData; ++i)
                {
                    int randomIndex = (rand() % pTrainingData->getNumExamples());
                    //int randomIndex = getRandomIndex();
                    pStochasticLearner->update(randomIndex);
                    pStochasticConstantWeakHypothesis->update(randomIndex);
                }
                pStochasticLearner->finishLearning();
                pStochasticConstantWeakHypothesis->finishLearning();
            }
            catch (bad_cast& e)
            {
                cerr << "The weak learner must be a StochasticLearner!!!" << endl;
                exit(-1);
            }
        }
        else
        {
            filter( pTrainingData, currentNumberOfUsedData );
            if ( pTrainingData->getNumExamples() < 2 )
            {
                filter( pTrainingData, currentNumberOfUsedData, false );
            }

            if (_verbose > 1)
            {
                cout << "--> Size of training data = " << pTrainingData->getNumExamples() << endl;
            }

            energy = pWeakHypothesis->run();
            pConstantWeakHypothesis->run();
        }

        // estimate the edge on a freshly filtered sample
        filter( pTrainingData, currentNumberOfUsedData, false );
        edge = pWeakHypothesis->getEdge(true) / 2.0;
        constantEdge = pConstantWeakHypothesis->getEdge() / 2.0;

        // keep whichever of the two learners has the larger edge
        if ( constantEdge > edge )
        {
            delete pWeakHypothesis;
            pWeakHypothesis = pConstantWeakHypothesis;
            edge = constantEdge;
        }
        else
        {
            delete pConstantWeakHypothesis;
        }

        // calculate alpha
        AlphaReal alpha = 0.5 * log( ( 1 + edge ) / ( 1 - edge ) );
        pWeakHypothesis->setAlpha( alpha );
        _sumAlpha += alpha;

        if (_verbose > 1)
            cout << "Weak learner: " << pWeakHypothesis->getName() << endl;

        // output the step-by-step information
        pTrainingData->clearIndexSet();
        printOutputInfo(pOutInfo, t, pTrainingData, pTestData, pWeakHypothesis);

        // updates the weights and returns the edge
        //AlphaReal gamma = updateWeights(pTrainingData, pWeakHypothesis);

        if (_verbose > 1)
        {
            cout << setprecision(5)
                 << "--> Alpha = " << pWeakHypothesis->getAlpha() << endl
                 << "--> Edge  = " << edge << endl
                 << "--> Energy  = " << energy << endl
                 //<< "--> ConstantEnergy  = " << constantEnergy << endl
                 //<< "--> difference  = " << (energy - constantEnergy) << endl
                 ;
        }

        // update the margins
        //saveMargins();
        updateMargins( pTrainingData, pWeakHypothesis );

        // append the current weak learner to the strong hypothesis file,
        // that is, serialize it
        ss.appendHypothesis(t, pWeakHypothesis);

        // add it to the internal list of weak hypotheses; the list owns the
        // pointer now, so deleting it at the end of the iteration (as the old
        // code did) would leave a dangling entry behind
        _foundHypotheses.push_back(pWeakHypothesis);

        // check if the time limit has been reached
        if (_maxTime > 0)
        {
            time( &currentTime );
            float diff = difftime(currentTime, startTime); // difftime is in seconds
            diff /= 60; // = minutes

            if (diff > _maxTime)
            {
                if (_verbose > 0)
                    cout << "Time limit of " << _maxTime << " minutes has been reached!" << endl;
                break;
            }
        } // check for maxtime
    } // loop on iterations
    /////////////////////////////////////////////////////////

    // write the footer of the strong hypothesis file
    ss.writeFooter();

    // free the two input data objects
    if (pTrainingData)
        delete pTrainingData;
    if (pTestData)
        delete pTestData;
    if (pOutInfo)
        delete pOutInfo;

    if (_verbose > 0)
        cout << "Learning completed." << endl;
}
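// -------------------------------------------------------------------------
// Illustrative sketch, not MultiBoost code: the filter() calls above draw a
// subsample of roughly _Cn * log(t+3) examples before training and before
// estimating the edge. In FilterBoost (Bradley & Schapire, 2008) such a
// filter is a rejection sampler that accepts example i with probability
// 1 / (1 + exp(margin_i)), so low- and negative-margin examples are drawn
// more often. The helper below is a minimal standalone version under that
// assumption; filterSample and its interface are invented for illustration.

#include <cmath>
#include <cstdlib>
#include <vector>

std::vector<int> filterSample(const std::vector<double>& margins, int targetSize)
{
    std::vector<int> accepted;
    while (static_cast<int>(accepted.size()) < targetSize)
    {
        // pick a candidate uniformly at random
        int i = std::rand() % static_cast<int>(margins.size());
        // logistic acceptance probability: high margin => rarely kept
        double acceptProb = 1.0 / (1.0 + std::exp(margins[i]));
        if (static_cast<double>(std::rand()) / RAND_MAX < acceptProb)
            accepted.push_back(i); // rejection step keeps this candidate
    }
    return accepted;
}

// Sampling this way lets the weak learner train on (approximately) the
// boosting distribution without ever materializing per-example weights.
// -------------------------------------------------------------------------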
void FilterBoostLearner::run(const nor_utils::Args& args)
{
    // load the arguments
    this->getArgs(args);

    time_t startTime, currentTime;
    time(&startTime);

    // get the registered weak learner (type from name)
    BaseLearner* pWeakHypothesisSource =
        BaseLearner::RegisteredLearners().getLearner(_baseLearnerName);

    // initialize the learning options; normally this is done in the strong loop,
    // but we also do it here for Product learners, so that the input data can be created
    pWeakHypothesisSource->initLearningOptions(args);

    BaseLearner* pConstantWeakHypothesisSource =
        BaseLearner::RegisteredLearners().getLearner("ConstantLearner");

    // get the training input data, and load it
    InputData* pTrainingData = pWeakHypothesisSource->createInputData();
    pTrainingData->initOptions(args);
    pTrainingData->load(_trainFileName, IT_TRAIN, _verbose);

    const int numClasses = pTrainingData->getNumClasses();
    const int numExamples = pTrainingData->getNumExamples();

    // initialize the margins variable
    _margins.resize( numExamples );
    for ( int i = 0; i < numExamples; i++ )
    {
        _margins[i].resize( numClasses );
        fill( _margins[i].begin(), _margins[i].end(), 0.0 );
    }

    // get the testing input data, and load it
    InputData* pTestData = NULL;
    if ( !_testFileName.empty() )
    {
        pTestData = pWeakHypothesisSource->createInputData();
        pTestData->initOptions(args);
        pTestData->load(_testFileName, IT_TEST, _verbose);
    }

    // the output information object
    OutputInfo* pOutInfo = NULL;

    if ( !_outputInfoFile.empty() )
    {
        // baseline: the constant classifier goes into the 0th iteration
        BaseLearner* pConstantWeakHypothesis = pConstantWeakHypothesisSource->create();
        pConstantWeakHypothesis->initLearningOptions(args);
        pConstantWeakHypothesis->setTrainingData(pTrainingData);
        float constantEnergy = pConstantWeakHypothesis->run();

        pOutInfo = new OutputInfo(_outputInfoFile);
        pOutInfo->initialize(pTrainingData);

        updateMargins( pTrainingData, pConstantWeakHypothesis );

        if (pTestData)
            pOutInfo->initialize(pTestData);
        pOutInfo->outputHeader();

        pOutInfo->outputIteration(-1);
        pOutInfo->outputError(pTrainingData, pConstantWeakHypothesis);
        if (pTestData)
            pOutInfo->outputError(pTestData, pConstantWeakHypothesis);
        /*
        pOutInfo->outputMargins(pTrainingData, pConstantWeakHypothesis);
        pOutInfo->outputEdge(pTrainingData, pConstantWeakHypothesis);
        if (pTestData)
            pOutInfo->outputMargins(pTestData, pConstantWeakHypothesis);
        pOutInfo->outputMAE(pTrainingData);
        if (pTestData)
            pOutInfo->outputMAE(pTestData);
        */
        pOutInfo->outputCurrentTime();
        pOutInfo->endLine();

        pOutInfo->initialize(pTrainingData);
        if (pTestData)
            pOutInfo->initialize(pTestData);
    }

    // reload the previously found weak learners if -resume is set;
    // otherwise this simply returns 0
    int startingIteration = resumeWeakLearners(pTrainingData);

    Serialization ss(_shypFileName, _isShypCompressed);
    ss.writeHeader(_baseLearnerName); // this must go after resumeWeakLearners has been called

    // perform the resuming if necessary; if not, this simply returns
    resumeProcess(ss, pTrainingData, pTestData, pOutInfo);

    if (_verbose == 1)
        cout << "Learning in progress..." << endl;

    ///////////////////////////////////////////////////////////////////////
    // Starting the AdaBoost main loop
    ///////////////////////////////////////////////////////////////////////
    for (int t = startingIteration; t < _numIterations; ++t)
    {
        if (_verbose > 1)
            cout << "------- WORKING ON ITERATION " << (t+1) << " -------" << endl;

        filter( pTrainingData, (int)(_Cn * log(t + 2.0)) );
        if ( pTrainingData->getNumExamples() < 2 )
        {
            filter( pTrainingData, (int)(_Cn * log(t + 2.0)), false );
        }

        if (_verbose > 1)
        {
            cout << "--> Size of training data = " << pTrainingData->getNumExamples() << endl;
        }

        BaseLearner* pWeakHypothesis = pWeakHypothesisSource->create();
        pWeakHypothesis->initLearningOptions(args);
        //pTrainingData->clearIndexSet();
        pWeakHypothesis->setTrainingData(pTrainingData);
        float energy = pWeakHypothesis->run();

        BaseLearner* pConstantWeakHypothesis = NULL; // only created when requested
        if (_withConstantLearner) // check the constant learner if the user wants it
        {
            pConstantWeakHypothesis = pConstantWeakHypothesisSource->create();
            pConstantWeakHypothesis->initLearningOptions(args);
            pConstantWeakHypothesis->setTrainingData(pTrainingData);
            float constantEnergy = pConstantWeakHypothesis->run();
        }

        // estimate the edge on a freshly filtered sample
        filter( pTrainingData, (int)(_Cn * log(t + 2.0)), false );
        float edge = pWeakHypothesis->getEdge() / 2.0;

        if (_withConstantLearner) // keep the constant learner if it has a larger edge
        {
            float constantEdge = pConstantWeakHypothesis->getEdge() / 2.0;
            if ( constantEdge > edge )
            {
                delete pWeakHypothesis;
                pWeakHypothesis = pConstantWeakHypothesis;
                edge = constantEdge;
            }
            else
            {
                delete pConstantWeakHypothesis;
            }
        }

        // calculate alpha
        float alpha = 0.5 * log( ( 0.5 + edge ) / ( 0.5 - edge ) );
        pWeakHypothesis->setAlpha( alpha );

        if (_verbose > 1)
            cout << "Weak learner: " << pWeakHypothesis->getName() << endl;

        // output the step-by-step information
        pTrainingData->clearIndexSet();
        printOutputInfo(pOutInfo, t, pTrainingData, pTestData, pWeakHypothesis);

        // updates the weights and returns the edge
        float gamma = updateWeights(pTrainingData, pWeakHypothesis);

        if (_verbose > 1)
        {
            cout << setprecision(5)
                 << "--> Alpha = " << pWeakHypothesis->getAlpha() << endl
                 << "--> Edge  = " << gamma << endl
                 << "--> Energy  = " << energy << endl
                 //<< "--> ConstantEnergy  = " << constantEnergy << endl
                 //<< "--> difference  = " << (energy - constantEnergy) << endl
                 ;
        }

        // update the margins
        updateMargins( pTrainingData, pWeakHypothesis );

        // append the current weak learner to the strong hypothesis file,
        // that is, serialize it
        ss.appendHypothesis(t, pWeakHypothesis);

        // add it to the internal list of weak hypotheses; the list owns the
        // pointer now, so it must not be deleted at the end of the iteration
        _foundHypotheses.push_back(pWeakHypothesis);

        // check if the time limit has been reached
        if (_maxTime > 0)
        {
            time( &currentTime );
            float diff = difftime(currentTime, startTime); // difftime is in seconds
            diff /= 60; // = minutes

            if (diff > _maxTime)
            {
                if (_verbose > 0)
                    cout << "Time limit of " << _maxTime << " minutes has been reached!" << endl;
                break;
            }
        } // check for maxtime
    } // loop on iterations
    /////////////////////////////////////////////////////////

    // write the footer of the strong hypothesis file
    ss.writeFooter();

    // free the two input data objects
    if (pTrainingData)
        delete pTrainingData;
    if (pTestData)
        delete pTestData;
    if (pOutInfo)
        delete pOutInfo;

    if (_verbose > 0)
        cout << "Learning completed." << endl;
}
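// -------------------------------------------------------------------------
// Note on the alpha formula in this revision: getEdge() is halved before
// use, so writing gamma for the full edge (edge = gamma / 2),
//
//     alpha = 1/2 * ln( (0.5 + gamma/2) / (0.5 - gamma/2) )
//           = 1/2 * ln( (1 + gamma) / (1 - gamma) ),
//
// which is the standard AdaBoost coefficient expressed in terms of the full
// edge gamma. For example, gamma = 0.2 gives alpha = 0.5 * ln(1.2 / 0.8),
// i.e. roughly 0.2027: the larger the edge, the heavier the weak learner's
// vote in the strong classifier.
// -------------------------------------------------------------------------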
void AdaBoostMHLearner::run(const nor_utils::Args& args)
{
    // load the arguments
    this->getArgs(args);

    // get the registered weak learner (type from name)
    BaseLearner* pWeakHypothesisSource =
        BaseLearner::RegisteredLearners().getLearner(_baseLearnerName);

    // initialize the learning options; normally this is done in the strong loop,
    // but we also do it here for Product learners, so that the input data can be created
    pWeakHypothesisSource->initLearningOptions(args);

    BaseLearner* pConstantWeakHypothesisSource =
        BaseLearner::RegisteredLearners().getLearner("ConstantLearner");

    // get the training input data, and load it
    InputData* pTrainingData = pWeakHypothesisSource->createInputData();
    pTrainingData->initOptions(args);
    pTrainingData->load(_trainFileName, IT_TRAIN, _verbose);

    // get the testing input data, and load it
    InputData* pTestData = NULL;
    if ( !_testFileName.empty() )
    {
        pTestData = pWeakHypothesisSource->createInputData();
        pTestData->initOptions(args);
        pTestData->load(_testFileName, IT_TEST, _verbose);
    }

    // the output information object
    OutputInfo* pOutInfo = NULL;

    if ( !_outputInfoFile.empty() )
    {
        // baseline: the constant classifier goes into the 0th iteration
        BaseLearner* pConstantWeakHypothesis = pConstantWeakHypothesisSource->create();
        pConstantWeakHypothesis->initLearningOptions(args);
        pConstantWeakHypothesis->setTrainingData(pTrainingData);
        AlphaReal constantEnergy = pConstantWeakHypothesis->run();

        //pOutInfo = new OutputInfo(_outputInfoFile);
        pOutInfo = new OutputInfo(args);
        pOutInfo->initialize(pTrainingData);

        if (pTestData)
            pOutInfo->initialize(pTestData);
        pOutInfo->outputHeader(pTrainingData->getClassMap());

        pOutInfo->outputIteration(-1);
        pOutInfo->outputCustom(pTrainingData, pConstantWeakHypothesis);

        if (pTestData != NULL)
        {
            pOutInfo->separator();
            pOutInfo->outputCustom(pTestData, pConstantWeakHypothesis);
        }

        pOutInfo->outputCurrentTime();
        pOutInfo->endLine();

        pOutInfo->initialize(pTrainingData);
        if (pTestData)
            pOutInfo->initialize(pTestData);
    }

    //cout << "Before serialization" << endl;
    // reload the previously found weak learners if -resume is set;
    // otherwise this simply returns 0
    int startingIteration = resumeWeakLearners(pTrainingData);

    Serialization ss(_shypFileName, _isShypCompressed);
    ss.writeHeader(_baseLearnerName); // this must go after resumeWeakLearners has been called

    // perform the resuming if necessary; if not, this simply returns
    resumeProcess(ss, pTrainingData, pTestData, pOutInfo);

    if (_verbose == 1)
        cout << "Learning in progress..." << endl;

    // the starting time is taken here, but loading a saved model may take a very long time
    time_t startTime, currentTime;
    time(&startTime);

    ///////////////////////////////////////////////////////////////////////
    // Starting the AdaBoost main loop
    ///////////////////////////////////////////////////////////////////////
    for (int t = startingIteration; t < _numIterations; ++t)
    {
        if (_verbose > 1)
            cout << "------- WORKING ON ITERATION " << (t+1) << " -------" << endl;

        BaseLearner* pWeakHypothesis = pWeakHypothesisSource->create();
        pWeakHypothesis->initLearningOptions(args);
        //pTrainingData->clearIndexSet();
        pWeakHypothesis->setTrainingData(pTrainingData);

        AlphaReal energy = pWeakHypothesis->run();

        //float gamma = pWeakHypothesis->getEdge();
        //cout << gamma << endl;

        // check the constant learner if the user wants it
        // (if energy is NaN, we choose the constant learner)
        if ( (_withConstantLearner) || ( energy != energy ) )
        {
            BaseLearner* pConstantWeakHypothesis = pConstantWeakHypothesisSource->create();
            pConstantWeakHypothesis->initLearningOptions(args);
            pConstantWeakHypothesis->setTrainingData(pTrainingData);
            AlphaReal constantEnergy = pConstantWeakHypothesis->run();

            if ( (constantEnergy <= energy) || ( energy != energy ) )
            {
                delete pWeakHypothesis;
                pWeakHypothesis = pConstantWeakHypothesis;
            }
        }

        if (_verbose > 1)
            cout << "Weak learner: " << pWeakHypothesis->getName() << endl;

        // output the step-by-step information
        printOutputInfo(pOutInfo, t, pTrainingData, pTestData, pWeakHypothesis);

        // updates the weights and returns the edge
        AlphaReal gamma = updateWeights(pTrainingData, pWeakHypothesis);

        if (_verbose > 1)
        {
            cout << setprecision(5)
                 << "--> Alpha = " << pWeakHypothesis->getAlpha() << endl
                 << "--> Edge  = " << gamma << endl
                 << "--> Energy  = " << energy << endl
                 //<< "--> ConstantEnergy  = " << constantEnergy << endl
                 //<< "--> difference  = " << (energy - constantEnergy) << endl
                 ;
        }

        // If gamma <= theta the algorithm must stop.
        // If theta == 0 and gamma is 0, the weak learner is no better than
        // chance and no further training is possible.
        if (gamma <= _theta)
        {
            if (_verbose > 0)
            {
                cout << "Can't train any further: edge = " << gamma
                     << " (with an edge offset (theta) = " << _theta << ")" << endl;
            }
            //delete pWeakHypothesis;
            //break;
        }

        // append the current weak learner to the strong hypothesis file,
        // that is, serialize it
        ss.appendHypothesis(t, pWeakHypothesis);

        // add it to the internal list of weak hypotheses; the list owns the
        // pointer now, so it must not be deleted at the end of the iteration
        _foundHypotheses.push_back(pWeakHypothesis);

        // check if the time limit has been reached
        if (_maxTime > 0)
        {
            time( &currentTime );
            float diff = difftime(currentTime, startTime); // difftime is in seconds
            diff /= 60; // = minutes

            if (diff > _maxTime)
            {
                if (_verbose > 0)
                    cout << "Time limit of " << _maxTime << " minutes has been reached!" << endl;
                break;
            }
        } // check for maxtime
    } // loop on iterations
    /////////////////////////////////////////////////////////

    // write the footer of the strong hypothesis file
    ss.writeFooter();

    // write the weights of the instances if the name of the weights file isn't empty
    printOutWeights( pTrainingData );

    // free the two input data objects
    if (pTrainingData)
        delete pTrainingData;
    if (pTestData)
        delete pTestData;
    if (pOutInfo)
        delete pOutInfo;

    if (_verbose > 0)
        cout << "Learning completed." << endl;
}
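// -------------------------------------------------------------------------
// Illustrative sketch, not MultiBoost code: updateWeights() is called above
// but not shown here. In AdaBoost.MH each (example, class) pair carries a
// weight that is multiplied by exp(-alpha * y * h(x, l)) and renormalized,
// and the returned edge gamma is measured against the pre-update weights.
// The stand-in type SimpleLabel and the function adaBoostMHUpdate below are
// hypothetical, written only to make the update explicit.

#include <cmath>
#include <vector>

struct SimpleLabel { int y; double weight; }; // y in {-1, +1}

// Returns the edge gamma = sum_{i,l} w_old * y * h[i][l], then applies the
// multiplicative weight update and renormalizes so the weights sum to 1.
double adaBoostMHUpdate(std::vector<std::vector<SimpleLabel> >& labels,
                        const std::vector<std::vector<double> >& h, // h[i][l] in [-1, 1]
                        double alpha)
{
    double gamma = 0.0, Z = 0.0;
    for (size_t i = 0; i < labels.size(); ++i)
    {
        for (size_t l = 0; l < labels[i].size(); ++l)
        {
            SimpleLabel& lab = labels[i][l];
            gamma += lab.weight * lab.y * h[i][l];            // edge uses the old weights
            lab.weight *= std::exp(-alpha * lab.y * h[i][l]); // exponential update
            Z += lab.weight;                                  // normalization constant
        }
    }
    for (size_t i = 0; i < labels.size(); ++i)
        for (size_t l = 0; l < labels[i].size(); ++l)
            labels[i][l].weight /= Z;
    return gamma;
}
// -------------------------------------------------------------------------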
// -------------------------------------------------------------------------

void AdaBoostMHLearner::run( const nor_utils::Args& args, InputData* pTrainingData,
                             const string baseLearnerName, const int numIterations,
                             vector<BaseLearner*>& foundHypotheses )
{
    // get the registered weak learner (type from name)
    BaseLearner* pWeakHypothesisSource =
        BaseLearner::RegisteredLearners().getLearner(baseLearnerName);

    // initialize the learning options; normally this is done in the strong loop,
    // but we also do it here for Product learners, so that the input data can be created
    pWeakHypothesisSource->initLearningOptions(args);

    BaseLearner* pConstantWeakHypothesisSource =
        BaseLearner::RegisteredLearners().getLearner("ConstantLearner");

    if (_verbose == 1)
        cout << "Learning in progress..." << endl;

    ///////////////////////////////////////////////////////////////////////
    // Starting the AdaBoost main loop
    ///////////////////////////////////////////////////////////////////////
    for (int t = 0; t < numIterations; ++t)
    {
        if ((_verbose > 0) && ((t % 100) == 0))
            cout << "--------------[ Boosting iteration " << (t+1) << " ]--------------" << endl;

        BaseLearner* pWeakHypothesis = pWeakHypothesisSource->create();
        pWeakHypothesis->initLearningOptions(args);
        //pTrainingData->clearIndexSet();
        pWeakHypothesis->setTrainingData(pTrainingData);
        AlphaReal energy = pWeakHypothesis->run();

        //float gamma = pWeakHypothesis->getEdge();
        //cout << gamma << endl;

        // check the constant learner if the user wants it
        // (if energy is NaN, we choose the constant learner)
        if ( (_withConstantLearner) || ( energy != energy ) )
        {
            BaseLearner* pConstantWeakHypothesis = pConstantWeakHypothesisSource->create();
            pConstantWeakHypothesis->initLearningOptions(args);
            pConstantWeakHypothesis->setTrainingData(pTrainingData);
            AlphaReal constantEnergy = pConstantWeakHypothesis->run();

            if ( (constantEnergy <= energy) || ( energy != energy ) )
            {
                delete pWeakHypothesis;
                pWeakHypothesis = pConstantWeakHypothesis;
            }
        }

        if (_verbose > 1)
            cout << "Weak learner: " << pWeakHypothesis->getName() << endl;

        // updates the weights and returns the edge
        AlphaReal gamma = updateWeights(pTrainingData, pWeakHypothesis);

        if (_verbose > 1)
        {
            cout << setprecision(5)
                 << "--> Alpha = " << pWeakHypothesis->getAlpha() << endl
                 << "--> Edge  = " << gamma << endl
                 << "--> Energy  = " << energy << endl
                 //<< "--> ConstantEnergy  = " << constantEnergy << endl
                 //<< "--> difference  = " << (energy - constantEnergy) << endl
                 ;
        }

        // If gamma <= theta the algorithm must stop.
        // If theta == 0 and gamma is 0, the weak learner is no better than
        // chance and no further training is possible.
        if (gamma <= _theta)
        {
            if (_verbose > 0)
            {
                cout << "Can't train any further: edge = " << gamma
                     << " (with an edge offset (theta) = " << _theta << ")" << endl;
            }
            //delete pWeakHypothesis;
            //break;
        }

        // add it to the list of weak hypotheses
        foundHypotheses.push_back(pWeakHypothesis);
    } // loop on iterations
    /////////////////////////////////////////////////////////

    if (_verbose > 0)
        cout << "--------------[ AdaBoost Learning completed. ]--------------" << endl;
}
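// -------------------------------------------------------------------------
// Aside on the (energy != energy) test used in both run() overloads above:
// under IEEE-754, NaN is the only value that compares unequal to itself, so
// the expression is true exactly when the weak learner's run() returned NaN.
// A more explicit spelling, assuming a C++11 <cmath> is available (a sketch,
// not project code; weakLearnerFailed is a name invented for illustration):

#include <cmath>

inline bool weakLearnerFailed(double energy)
{
    return std::isnan(energy); // equivalent to (energy != energy) under IEEE-754
}
// -------------------------------------------------------------------------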
float BanditProductLearner::run()
{
    if ( !this->_banditAlgo->isInitialized() )
    {
        init();
    }

    // the bandit algorithm selects the subset the tree learner is allowed to
    // use; the arm indexes will be stored in _armsForPulling
    getArms();

    const int numClasses = _pTrainingData->getNumClasses();
    const int numExamples = _pTrainingData->getNumExamples();

    // back up the original labels
    for (int i = 0; i < numExamples; ++i)
    {
        const vector<Label>& labels = _pTrainingData->getLabels(i);
        vector<char> exampleLabels;
        for (int l = 0; l < numClasses; ++l)
            exampleLabels.push_back(labels[l].y);
        _savedLabels.push_back(exampleLabels);
    }

    for (int ib = 0; ib < _numBaseLearners; ++ib)
        _baseLearners[ib]->setTrainingData(_pTrainingData);

    float energy = numeric_limits<float>::max();
    float previousEnergy, hx, previousAlpha;
    BaseLearner* pPreviousBaseLearner = 0;
    bool firstLoop = true;
    int ib = -1;

    while (1)
    {
        ib += 1;
        if (ib >= _numBaseLearners)
        {
            ib = 0;
            firstLoop = false;
        }

        previousEnergy = energy;
        previousAlpha = _alpha;
        if (pPreviousBaseLearner)
            delete pPreviousBaseLearner;

        if ( !firstLoop )
        {
            // take the old learner off the labels
            for (int i = 0; i < numExamples; ++i)
            {
                vector<Label>& labels = _pTrainingData->getLabels(i);
                for (int l = 0; l < numClasses; ++l)
                {
                    // here we could use the confidence-rated setting, i.e. the
                    // real-valued output of classify() instead of its sign
                    hx = _baseLearners[ib]->classify(_pTrainingData, i, l);
                    if ( hx < 0 )
                        labels[l].y *= -1;
                    else if ( hx == 0 )
                    {
                        // have to redo the multiplications; this branch hasn't been tested
                        for (int ib1 = 0; ib1 < _numBaseLearners && labels[l].y != 0; ++ib1)
                        {
                            if (ib != ib1)
                            {
                                hx = _baseLearners[ib1]->classify(_pTrainingData, i, l);
                                if (hx < 0)
                                    labels[l].y *= -1;
                                else if (hx == 0)
                                    labels[l].y = 0;
                            }
                        }
                    }
                }
            }
        }

        pPreviousBaseLearner = _baseLearners[ib]->copyState();
        energy = dynamic_cast<FeaturewiseLearner*>(_baseLearners[ib])->run(_armsForPulling);

        // check whether the energy is a signaling NaN
        if ( energy != energy )
        {
            if (_verbose > 2)
            {
                cout << "Cannot find weak hypothesis, constant learner is used!!" << endl;
            }

            BaseLearner* pConstantWeakHypothesisSource =
                BaseLearner::RegisteredLearners().getLearner("ConstantLearner");
            BaseLearner* pConstantWeakHypothesis = pConstantWeakHypothesisSource->create();
            pConstantWeakHypothesis->setTrainingData( _pTrainingData );
            energy = pConstantWeakHypothesis->run();

            delete _baseLearners[ib];
            _baseLearners[ib] = pConstantWeakHypothesis;
        }

        _alpha = _baseLearners[ib]->getAlpha();
        if (_verbose > 2)
        {
            cout << "E[" << (ib+1) << "] = " << energy << endl << flush;
            cout << "alpha[" << (ib+1) << "] = " << _alpha << endl << flush;
        }

        for (int i = 0; i < numExamples; ++i)
        {
            vector<Label>& labels = _pTrainingData->getLabels(i);
            for (int l = 0; l < numClasses; ++l)
            {
                // here we could use the confidence-rated setting, i.e. the
                // real-valued output of classify() instead of its sign
                if (labels[l].y != 0) // perhaps replace with nor_utils::is_zero(labels[l].y)
                {
                    hx = _baseLearners[ib]->classify(_pTrainingData, i, l);
                    if ( hx < 0 )
                        labels[l].y *= -1;
                    else if ( hx == 0 )
                        labels[l].y = 0;
                }
            }
        }

        // We have to do at least one full iteration; in general a decrease of
        // the energy is not guaranteed. Alternatively we could initialize all
        // of the base learners to the constant learner.
        // if ( !firstLoop && energy >= previousEnergy ) {
        //     if (energy > previousEnergy) {
        //         _baseLearners[ib] = pPreviousBaseLearner->copyState();
        //         delete pPreviousBaseLearner;
        //         energy = previousEnergy;
        //         _alpha = _baseLearners[ib]->getAlpha();
        //     }
        //     break;
        // }
        if ( energy >= previousEnergy )
        {
            _alpha = previousAlpha;
            energy = previousEnergy;
            if (firstLoop)
            {
                for (int ib2 = ib; ib2 < _numBaseLearners; ++ib2)
                    delete _baseLearners[ib2];
                _numBaseLearners = ib;
            }
            else
            {
                _baseLearners[ib] = pPreviousBaseLearner->copyState();
            }
            delete pPreviousBaseLearner;
            break;
        }
    }

    // restore the original labels
    for (int i = 0; i < numExamples; ++i)
    {
        vector<Label>& labels = _pTrainingData->getLabels(i);
        for (int l = 0; l < numClasses; ++l)
            labels[l].y = _savedLabels[i][l];
    }

    _id = _baseLearners[0]->getId();
    for (int ib = 1; ib < _numBaseLearners; ++ib)
        _id += "_x_" + _baseLearners[ib]->getId();

    // bandit part: we calculate the reward
    _reward = getRewardFromEdge( getEdge() );
    provideRewardForBanditAlgo();

    return energy;
}
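// -------------------------------------------------------------------------
// Illustrative sketch, not MultiBoost code: a product learner votes with the
// product of its base learners' votes, which is why the training loop above
// can "take one factor off" by flipping the sign of the labels that factor
// classifies negatively, retraining it, and folding it back in. The helper
// productVote below is invented for illustration.

#include <vector>

int productVote(const std::vector<int>& baseVotes) // each vote in {-1, 0, +1}
{
    int v = 1;
    for (size_t j = 0; j < baseVotes.size(); ++j)
    {
        v *= baseVotes[j];
        if (v == 0)
            return 0; // any abstaining factor zeroes the whole product
    }
    return v;
}
// -------------------------------------------------------------------------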