Code example #1
	// -----------------------------------------------------------------------
	// -----------------------------------------------------------------------
	// -----------------------------------------------------------------------	
	void UnSerialization::loadHypothesis(nor_utils::StreamTokenizer& st, 
										 vector<BaseLearner*>& weakHypotheses,
										 InputData* pTrainingData, int verbose)
	{
		string basicLearnerName = seekAndParseEnclosedValue<string>(st, "weakLearner");
		
		// Check if the weak learner exists
		if ( !BaseLearner::RegisteredLearners().hasLearner(basicLearnerName) ) {
			cerr << "ERROR: Weak learner <" << basicLearnerName << "> not registered!!" << endl;
			exit(1);
		}
		
		// allocate the weak learner object
		BaseLearner* pWeakHypothesis = 
		BaseLearner::RegisteredLearners().getLearner(basicLearnerName)->create();
		pWeakHypothesis->setTrainingData(pTrainingData);
		
		// load it
		pWeakHypothesis->load(st);
		
		// at least </weakhyp> is expected here; if the stream has run out
		// of tokens, the weak hypothesis entry was truncated
		if ( !st.has_token() ) {
			cerr << "WARNING: Incomplete weak hypothesis file found. Check the shyp file!" << endl;
			delete pWeakHypothesis;
			return;
		}
		// store it in the vector
		weakHypotheses.push_back(pWeakHypothesis);
		
		// show some progress while loading on verbose > 1
		if (verbose > 1 && weakHypotheses.size() % 1000 == 0)
			cout << "." << flush;
	}
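This routine parses a single weak hypothesis, so a caller would invoke it once per <weakhyp> entry while walking the strong-hypothesis (shyp) file. The driver below is a minimal sketch, not taken from the excerpt: the StreamTokenizer constructor signature and the delimiter set are assumptions, and loadHypothesis is called through an UnSerialization instance as declared above.

    // Hypothetical driver (sketch only): load every weak hypothesis from an
    // open shyp stream. Assumes StreamTokenizer(istream&, const char* delim)
    // and that loadHypothesis consumes one <weakhyp>...</weakhyp> entry per call.
    #include <fstream>
    #include <string>
    #include <vector>

    void loadAllHypotheses(const std::string& shypFileName,
                           std::vector<BaseLearner*>& weakHypotheses,
                           InputData* pTrainingData, int verbose)
    {
        std::ifstream inFile(shypFileName.c_str());
        nor_utils::StreamTokenizer st(inFile, "<>\n\r\t ");  // assumed delimiters

        UnSerialization us;
        while (st.has_token())
            us.loadHypothesis(st, weakHypotheses, pTrainingData, verbose);
    }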
Code example #2
File: FilterBoostLearner.cpp Project: ShenWei/src
	void FilterBoostLearner::run(const nor_utils::Args& args)
	{
		// load the arguments
		this->getArgs(args);

		time_t startTime, currentTime;
		time(&startTime);

		// get the registered weak learner (type from name)
		BaseLearner* pWeakHypothesisSource = 
			BaseLearner::RegisteredLearners().getLearner(_baseLearnerName);
		// initialize learning options; normally this is done in the strong loop.
		// We also do it here for Product learners, so that the input data can be created.
		pWeakHypothesisSource->initLearningOptions(args);

		BaseLearner* pConstantWeakHypothesisSource = 
			BaseLearner::RegisteredLearners().getLearner("ConstantLearner");

		// get the training input data, and load it

		InputData* pTrainingData = pWeakHypothesisSource->createInputData();
		pTrainingData->initOptions(args);
		pTrainingData->load(_trainFileName, IT_TRAIN, _verbose);

		const int numClasses = pTrainingData->getNumClasses();
		const int numExamples = pTrainingData->getNumExamples();
		
		//initialize the margins variable
		_margins.resize( numExamples );
		for( int i=0; i<numExamples; i++ )
		{
			_margins[i].resize( numClasses );
			fill( _margins[i].begin(), _margins[i].end(), 0.0 );
		}


		// get the testing input data, and load it
		InputData* pTestData = NULL;
		if ( !_testFileName.empty() )
		{
			pTestData = pWeakHypothesisSource->createInputData();
			pTestData->initOptions(args);
			pTestData->load(_testFileName, IT_TEST, _verbose);
		}

		// The output information object
		OutputInfo* pOutInfo = NULL;


		if ( !_outputInfoFile.empty() ) 
		{
			// Baseline: constant classifier - goes into 0th iteration

			BaseLearner* pConstantWeakHypothesis = pConstantWeakHypothesisSource->create() ;
			pConstantWeakHypothesis->initLearningOptions(args);
			pConstantWeakHypothesis->setTrainingData(pTrainingData);
			float constantEnergy = pConstantWeakHypothesis->run();

			pOutInfo = new OutputInfo(_outputInfoFile);
			pOutInfo->initialize(pTrainingData);

			updateMargins( pTrainingData, pConstantWeakHypothesis );

			if (pTestData)
				pOutInfo->initialize(pTestData);
			pOutInfo->outputHeader();

			pOutInfo->outputIteration(-1);
			pOutInfo->outputError(pTrainingData, pConstantWeakHypothesis);

			if (pTestData)
				pOutInfo->outputError(pTestData, pConstantWeakHypothesis);
			/*
			pOutInfo->outputMargins(pTrainingData, pConstantWeakHypothesis);
			
			pOutInfo->outputEdge(pTrainingData, pConstantWeakHypothesis);

			if (pTestData)
				pOutInfo->outputMargins(pTestData, pConstantWeakHypothesis);

			pOutInfo->outputMAE(pTrainingData);

			if (pTestData)
				pOutInfo->outputMAE(pTestData);
			*/
			pOutInfo->outputCurrentTime();

			pOutInfo->endLine();
			pOutInfo->initialize(pTrainingData);
			
			if (pTestData)
				pOutInfo->initialize(pTestData);
		}
		// reload the previously found weak learners if -resume is set;
		// otherwise this just returns 0
		int startingIteration = resumeWeakLearners(pTrainingData);


		Serialization ss(_shypFileName, _isShypCompressed );
		ss.writeHeader(_baseLearnerName); // this must go after resumeProcess has been called

		// perform the resuming if necessary; if not, this just returns
		resumeProcess(ss, pTrainingData, pTestData, pOutInfo);

		if (_verbose == 1)
			cout << "Learning in progress..." << endl;

		///////////////////////////////////////////////////////////////////////
		// Starting the AdaBoost main loop
		///////////////////////////////////////////////////////////////////////
		for (int t = startingIteration; t < _numIterations; ++t)
		{
			if (_verbose > 1)
				cout << "------- WORKING ON ITERATION " << (t+1) << " -------" << endl;

			filter( pTrainingData, (int)(_Cn * log(t+2.0)) );
			if ( pTrainingData->getNumExamples() < 2 ) 
			{
				filter( pTrainingData, (int)(_Cn * log(t+2.0)), false );
			}
			
			if (_verbose > 1)
			{
				cout << "--> Size of training data = " << pTrainingData->getNumExamples() << endl;
			}

			BaseLearner* pWeakHypothesis = pWeakHypothesisSource->create();
			pWeakHypothesis->initLearningOptions(args);
			//pTrainingData->clearIndexSet();
			pWeakHypothesis->setTrainingData(pTrainingData);
			float energy = pWeakHypothesis->run();

			BaseLearner* pConstantWeakHypothesis = NULL; // assigned only when _withConstantLearner is set
			if (_withConstantLearner) // check constant learner if user wants it
			{
				pConstantWeakHypothesis = pConstantWeakHypothesisSource->create() ;
				pConstantWeakHypothesis->initLearningOptions(args);
				pConstantWeakHypothesis->setTrainingData(pTrainingData);
				float constantEnergy = pConstantWeakHypothesis->run();
			}

			//estimate edge
			filter( pTrainingData, (int)(_Cn * log(t+2.0)), false );
			float edge = pWeakHypothesis->getEdge() / 2.0;

			if (_withConstantLearner) // check constant learner if user wants it
			{
				float constantEdge = pConstantWeakHypothesis->getEdge() / 2.0;
				if ( constantEdge > edge )
				{
					delete pWeakHypothesis;
					pWeakHypothesis = pConstantWeakHypothesis;
					edge = constantEdge;
				} else {
					delete pConstantWeakHypothesis;
				}
			}

			// calculate alpha
			float alpha = 0.5 * log( ( 0.5 + edge ) / ( 0.5 - edge ) );
			pWeakHypothesis->setAlpha( alpha );

			if (_verbose > 1)
				cout << "Weak learner: " << pWeakHypothesis->getName()<< endl;
			// Output the step-by-step information
			pTrainingData->clearIndexSet();
			printOutputInfo(pOutInfo, t, pTrainingData, pTestData, pWeakHypothesis);

			// Updates the weights and returns the edge
			float gamma = updateWeights(pTrainingData, pWeakHypothesis);

			if (_verbose > 1)
			{
				cout << setprecision(5)
					<< "--> Alpha = " << pWeakHypothesis->getAlpha() << endl
					<< "--> Edge  = " << gamma << endl
					<< "--> Energy  = " << energy << endl
					//            << "--> ConstantEnergy  = " << constantEnergy << endl
					//            << "--> difference  = " << (energy - constantEnergy) << endl
					;
			}

			// update the margins
			updateMargins( pTrainingData, pWeakHypothesis );

			// append the current weak learner to strong hypothesis file,
			// that is, serialize it.
			ss.appendHypothesis(t, pWeakHypothesis);

			// Add it to the internal list of weak hypotheses
			_foundHypotheses.push_back(pWeakHypothesis); 

			// check if the time limit has been reached
			if (_maxTime > 0)
			{
				time( &currentTime );
				float diff = difftime(currentTime, startTime); // difftime is in seconds
				diff /= 60; // = minutes

				if (diff > _maxTime)
				{
					if (_verbose > 0)
						cout << "Time limit of " << _maxTime 
						<< " minutes has been reached!" << endl;
					break;     
				}
			} // check for maxtime
			delete pWeakHypothesis;
		}  // loop on iterations
		/////////////////////////////////////////////////////////

		// write the footer of the strong hypothesis file
		ss.writeFooter();

		// Free the two input data objects
		if (pTrainingData)
			delete pTrainingData;
		if (pTestData)
			delete pTestData;

		if (pOutInfo)
			delete pOutInfo;

		if (_verbose > 0)
			cout << "Learning completed." << endl;
	}
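Two details of this loop are easy to miss: the raw edge from getEdge() is halved, which is why alpha uses the (0.5 + edge)/(0.5 - edge) form, and pWeakHypothesis is deleted after being pushed into _foundHypotheses, so the stored pointer must not be dereferenced afterwards. The snippet below is a self-contained sketch (names are illustrative, not from the project) showing that the halved-edge formula is the usual AdaBoost coefficient in disguise.

    // Sketch: FilterBoost-style alpha from a halved edge. With e the raw
    // edge in (-1, 1) and h = e / 2,
    //   0.5 * log((0.5 + h) / (0.5 - h)) == 0.5 * log((1 + e) / (1 - e)),
    // i.e. the familiar AdaBoost alpha.
    #include <cmath>
    #include <iostream>

    float alphaFromHalvedEdge(float halvedEdge)
    {
        return 0.5f * std::log((0.5f + halvedEdge) / (0.5f - halvedEdge));
    }

    int main()
    {
        for (float rawEdge = 0.1f; rawEdge < 0.9f; rawEdge += 0.2f)
            std::cout << "raw edge " << rawEdge << " -> alpha "
                      << alphaFromHalvedEdge(rawEdge / 2.0f) << "\n";
        return 0;
    }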
Code example #3
    void FilterBoostLearner::run(const nor_utils::Args& args)
    {
        // load the arguments
        this->getArgs(args);

        time_t startTime, currentTime;
        time(&startTime);

        // get the registered weak learner (type from name)
        BaseLearner* pWeakHypothesisSource = 
            BaseLearner::RegisteredLearners().getLearner(_baseLearnerName);
        // initialize learning options; normally this is done in the strong loop.
        // We also do it here for Product learners, so that the input data can be created.
        pWeakHypothesisSource->initLearningOptions(args);

        BaseLearner* pConstantWeakHypothesisSource = 
            BaseLearner::RegisteredLearners().getLearner("ConstantLearner");

        // get the training input data, and load it

        InputData* pTrainingData = pWeakHypothesisSource->createInputData();
        pTrainingData->initOptions(args);
        pTrainingData->load(_trainFileName, IT_TRAIN, _verbose);

        const int numClasses = pTrainingData->getNumClasses();
        const int numExamples = pTrainingData->getNumExamples();
                
        //initialize the margins variable
        _margins.resize( numExamples );
        for( int i=0; i<numExamples; i++ )
        {
            _margins[i].resize( numClasses );
            fill( _margins[i].begin(), _margins[i].end(), 0.0 );
        }


        // get the testing input data, and load it
        InputData* pTestData = NULL;
        if ( !_testFileName.empty() )
        {
            pTestData = pWeakHypothesisSource->createInputData();
            pTestData->initOptions(args);
            pTestData->load(_testFileName, IT_TEST, _verbose);
        }

        // The output information object
        OutputInfo* pOutInfo = NULL;


        if ( !_outputInfoFile.empty() ) 
        {
            // Baseline: constant classifier - goes into 0th iteration

            BaseLearner* pConstantWeakHypothesis = pConstantWeakHypothesisSource->create() ;
            pConstantWeakHypothesis->initLearningOptions(args);
            pConstantWeakHypothesis->setTrainingData(pTrainingData);
            AlphaReal constantEnergy = pConstantWeakHypothesis->run();

            pOutInfo = new OutputInfo(args);
            pOutInfo->initialize(pTrainingData);

            updateMargins( pTrainingData, pConstantWeakHypothesis );

            if (pTestData)
                pOutInfo->initialize(pTestData);
            pOutInfo->outputHeader(pTrainingData->getClassMap() );

            pOutInfo->outputIteration(-1);
            pOutInfo->outputCustom(pTrainingData, pConstantWeakHypothesis);

            if (pTestData)
            {
                pOutInfo->separator();
                pOutInfo->outputCustom(pTestData, pConstantWeakHypothesis);
            }
                        
            pOutInfo->outputCurrentTime();

            pOutInfo->endLine();
            pOutInfo->initialize(pTrainingData);
                        
            if (pTestData)
                pOutInfo->initialize(pTestData);
        }
        // reload the previously found weak learners if -resume is set;
        // otherwise this just returns 0
        int startingIteration = resumeWeakLearners(pTrainingData);


        Serialization ss(_shypFileName, _isShypCompressed );
        ss.writeHeader(_baseLearnerName); // this must go after resumeProcess has been called

        // perform the resuming if necessary; if not, this just returns
        resumeProcess(ss, pTrainingData, pTestData, pOutInfo);

        if (_verbose == 1)
            cout << "Learning in progress..." << endl;
                                
        ///////////////////////////////////////////////////////////////////////
        // Starting the AdaBoost main loop
        ///////////////////////////////////////////////////////////////////////
        for (int t = startingIteration; t < _numIterations; ++t)
        {                       
            if (_verbose > 1)
                cout << "------- WORKING ON ITERATION " << (t+1) << " -------" << endl;
                
            // create the weak learner
            BaseLearner* pWeakHypothesis;
            BaseLearner* pConstantWeakHypothesis;
            pWeakHypothesis = pWeakHypothesisSource->create();
            pWeakHypothesis->initLearningOptions(args);
            //pTrainingData->clearIndexSet();
            pWeakHypothesis->setTrainingData(pTrainingData);
            AlphaReal edge, energy=0.0;
                        
            // create the constant learner
            pConstantWeakHypothesis = pConstantWeakHypothesisSource->create() ;
            pConstantWeakHypothesis->initLearningOptions(args);
            pConstantWeakHypothesis->setTrainingData(pTrainingData);
            AlphaReal constantEdge = -numeric_limits<AlphaReal>::max();
                        
            int currentNumberOfUsedData = static_cast<int>(_Cn * log(t+3.0));
                        
            if ( _onlineWeakLearning )
            {
                // check whether the weak learner is a StochasticLearner
                // (dynamic_cast on a pointer returns NULL on failure, it
                // does not throw bad_cast, so test the results directly)
                StochasticLearner* pStochasticLearner = dynamic_cast<StochasticLearner*>(pWeakHypothesis);
                StochasticLearner* pStochasticConstantWeakHypothesis = dynamic_cast<StochasticLearner*>(pConstantWeakHypothesis);

                if ( pStochasticLearner == NULL || pStochasticConstantWeakHypothesis == NULL )
                {
                    cerr << "The weak learner must be a StochasticLearner!" << endl;
                    exit(-1);
                }

                pStochasticLearner->initLearning();
                pStochasticConstantWeakHypothesis->initLearning();

                if (_verbose>1)
                    cout << "Number of random instances: \t" << currentNumberOfUsedData << endl;

                // set the weights
                setWeightToMargins(pTrainingData);

                // learning
                for (int i=0; i<currentNumberOfUsedData; ++i )
                {
                    int randomIndex = (rand() % pTrainingData->getNumExamples());
                    //int randomIndex = getRandomIndex();
                    pStochasticLearner->update(randomIndex);
                    pStochasticConstantWeakHypothesis->update(randomIndex);
                }
                pStochasticLearner->finishLearning();
                pStochasticConstantWeakHypothesis->finishLearning();
            }
            else
            {
                filter( pTrainingData, currentNumberOfUsedData );
                if ( pTrainingData->getNumExamples() < 2 ) 
                {
                    filter( pTrainingData, currentNumberOfUsedData, false );
                }
                                
                if (_verbose > 1)
                {
                    cout << "--> Size of training data = " << pTrainingData->getNumExamples() << endl;
                }
                                
                energy = pWeakHypothesis->run();                                                                
                pConstantWeakHypothesis->run(); 
            }                       

            //estimate edge
            filter( pTrainingData, currentNumberOfUsedData, false );
            edge = pWeakHypothesis->getEdge(true) / 2.0;                                            
            constantEdge = pConstantWeakHypothesis->getEdge() / 2.0;
                        
                        
            if ( constantEdge > edge )
            {
                delete pWeakHypothesis;
                pWeakHypothesis = pConstantWeakHypothesis;
                edge = constantEdge;
            } else {
                delete pConstantWeakHypothesis;
            }
                                                                        
            // calculate alpha
            AlphaReal alpha = 0.5 * log( ( 1 + edge ) / ( 1 - edge ) );
            pWeakHypothesis->setAlpha( alpha );
            _sumAlpha += alpha;
                        
            if (_verbose > 1)
                cout << "Weak learner: " << pWeakHypothesis->getName()<< endl;
            // Output the step-by-step information
            pTrainingData->clearIndexSet();
            printOutputInfo(pOutInfo, t, pTrainingData, pTestData, pWeakHypothesis);

            // Updates the weights and returns the edge
            //AlphaReal gamma = updateWeights(pTrainingData, pWeakHypothesis);

            if (_verbose > 1)
            {
                cout << setprecision(5)
                     << "--> Alpha = " << pWeakHypothesis->getAlpha() << endl
                     << "--> Edge  = " << edge << endl
                     << "--> Energy  = " << energy << endl
                    //            << "--> ConstantEnergy  = " << constantEnergy << endl
                    //            << "--> difference  = " << (energy - constantEnergy) << endl
                    ;
            }

            // update the margins
            //saveMargins();
            updateMargins( pTrainingData, pWeakHypothesis );
                        
            // append the current weak learner to strong hypothesis file,
            // that is, serialize it.
            ss.appendHypothesis(t, pWeakHypothesis);

            // Add it to the internal list of weak hypotheses
            _foundHypotheses.push_back(pWeakHypothesis); 

            // check if the time limit has been reached
            if (_maxTime > 0)
            {
                time( &currentTime );
                float diff = difftime(currentTime, startTime); // difftime is in seconds
                diff /= 60; // = minutes

                if (diff > _maxTime)
                {
                    if (_verbose > 0)
                        cout << "Time limit of " << _maxTime 
                             << " minutes has been reached!" << endl;
                    break;     
                }
            } // check for maxtime
            delete pWeakHypothesis;
        }  // loop on iterations
        /////////////////////////////////////////////////////////

        // write the footer of the strong hypothesis file
        ss.writeFooter();

        // Free the two input data objects
        if (pTrainingData)
            delete pTrainingData;
        if (pTestData)
            delete pTestData;

        if (pOutInfo)
            delete pOutInfo;

        if (_verbose > 0)
            cout << "Learning completed." << endl;
    }
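This later revision of FilterBoostLearner::run adds an online path: when _onlineWeakLearning is set, both the weak learner and the constant learner are trained incrementally on randomly drawn examples instead of on a filtered batch (note it also switches to the (1 + edge)/(1 - edge) alpha form). The toy sketch below mirrors only the initLearning/update/finishLearning call pattern; ToyStochasticLearner is invented for illustration and is not the project's StochasticLearner.

    // Toy sketch of the online weak-learning pattern: initLearning(), a
    // stream of update(randomIndex) calls, then finishLearning().
    #include <cstdlib>
    #include <iostream>
    #include <vector>

    class ToyStochasticLearner {
    public:
        explicit ToyStochasticLearner(const std::vector<double>& data)
            : _data(data), _sum(0.0), _count(0), _mean(0.0) {}
        void initLearning()    { _sum = 0.0; _count = 0; }
        void update(int index) { _sum += _data[index]; ++_count; }  // one online step
        void finishLearning()  { _mean = _count ? _sum / _count : 0.0; }
        double model() const   { return _mean; }
    private:
        std::vector<double> _data;
        double _sum;
        int _count;
        double _mean;
    };

    int main()
    {
        std::vector<double> data;
        for (int i = 0; i < 100; ++i)
            data.push_back(i % 7);

        ToyStochasticLearner learner(data);
        learner.initLearning();
        int budget = 50;                      // cf. _Cn * log(t + 3.0) above
        for (int i = 0; i < budget; ++i)
            learner.update(std::rand() % static_cast<int>(data.size()));
        learner.finishLearning();

        std::cout << "fitted value: " << learner.model() << "\n";
        return 0;
    }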
Code example #4
File: TreeLearnerUCT.cpp Project: junjiek/cmu-exp
void TreeLearnerUCT::calculateChildrenAndEnergies( NodePointUCT& bLearner, int depthIndex ) {
    bLearner._extended = true;
    _pTrainingData->loadIndexSet( bLearner._learnerIdxSet );

    //separate the dataset
    set< int > idxPos, idxNeg;
    idxPos.clear();
    idxNeg.clear();
    float phix;

    for (int i = 0; i < _pTrainingData->getNumExamples(); ++i) {
        // this returns the phi value of classifier
        phix = bLearner._learner->classify(_pTrainingData,i,0);
        if ( phix <  0 )
            idxNeg.insert( _pTrainingData->getRawIndex( i ) );
        else if ( phix > 0 ) { // have to redo the multiplications, haven't been tested
            idxPos.insert( _pTrainingData->getRawIndex( i ) );
        }
    }

    if ( (idxPos.size() < 1 ) || (idxNeg.size() < 1 ) ) {
        //retval.clear();
        bLearner._extended = false;
        //return retval;
    }

    _pTrainingData->loadIndexSet( idxPos );

    if ( ! _pTrainingData->isSamplesFromOneClass() ) {
        BaseLearner* posLearner = _baseLearners[0]->copyState();

        //posLearner->run();
        dynamic_cast<FeaturewiseLearner*>(posLearner)->run( depthIndex );
        //
        //float posEdge = getEdge( posLearner, _pTrainingData );
        posLearner->setTrainingData( _pTrainingData );
        bLearner._leftEdge = posLearner->getEdge();

        //tmpPair.first = posEdge;
        //tmpPair.second.first.first = posLearner;
        bLearner._leftChild = posLearner;
        //set the parent idx to zero
        //tmpPair.second.first.second.first = 0;
        //this means that it will be a left child in the tree
        //tmpPair.second.first.second.second = 0;
        //tmpPair.second.second = idxPos;
        bLearner._leftChildIdxSet = idxPos;
    } else {
        BaseLearner* pConstantWeakHypothesisSource =
            BaseLearner::RegisteredLearners().getLearner("ConstantLearner");

        BaseLearner* posLearner = pConstantWeakHypothesisSource->create();
        posLearner->setTrainingData(_pTrainingData);
        //float constantEnergy = posLearner->run();
        dynamic_cast<FeaturewiseLearner*>(posLearner)->run( depthIndex );

        //BaseLearner* posLearner = _baseLearners[0]->copyState();
        //float posEdge = getEdge( posLearner, _pTrainingData );
        posLearner->setTrainingData( _pTrainingData );
        bLearner._leftEdge = posLearner->getEdge();

        //tmpPair.first = posEdge;
        //tmpPair.second.first.first = posLearner;
        bLearner._leftChild = posLearner;
        //set the parent idx to zero
        //tmpPair.second.first.second.first = 0;
        //this means that it will be a left child in the tree
        //tmpPair.second.first.second.second = 0;
        //tmpPair.second.second = idxPos;
        bLearner._leftChildIdxSet = idxPos;
    }

    //retval.push_back( tmpPair );

    _pTrainingData->loadIndexSet( idxNeg );

    if ( ! _pTrainingData->isSamplesFromOneClass() ) {
        BaseLearner* negLearner = _baseLearners[0]->copyState();


        //negLearner->run();
        dynamic_cast<FeaturewiseLearner*>(negLearner)->run( depthIndex );
        //float negEdge = getEdge( negLearner, _pTrainingData );

        negLearner->setTrainingData( _pTrainingData );
        bLearner._rightEdge = negLearner->getEdge();
        //tmpPair.first = negEdge;
        //tmpPair.second.first.first = negLearner;
        bLearner._rightChild = negLearner;
        //set the parent idx to zero
        //tmpPair.second.first.second.first = 0;
        //this means that it will be a right child in the tree
        //tmpPair.second.first.second.second = 1;
        //tmpPair.second.second = idxNeg;
        bLearner._rightChildIdxSet = idxNeg;
    } else {
        BaseLearner* pConstantWeakHypothesisSource =
            BaseLearner::RegisteredLearners().getLearner("ConstantLearner");

        BaseLearner* negLearner =  pConstantWeakHypothesisSource->create();
        negLearner->setTrainingData(_pTrainingData);
        //float constantEnergy = negLearner->run();
        dynamic_cast<FeaturewiseLearner*>(negLearner)->run( depthIndex );

        //tmpPair.first = getEdge( negLearner, _pTrainingData );;
        bLearner._rightChild = negLearner;
        //tmpPair.second.first.first = negLearner;
        //set the parent idx to zero
        //tmpPair.second.first.second.first = 0;
        //this means that it will be a right child in the tree
        //tmpPair.second.first.second.second = 1;
        //tmpPair.second.second = idxNeg;
        bLearner._rightChildIdxSet = idxNeg;
    }

    //retval.push_back( tmpPair );

    //return retval;
}
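The first loop above routes each training example to the positive or negative child according to the sign of the node classifier's output; examples classified exactly zero are routed to neither side. A minimal, self-contained sketch of that routing follows (the classifier is passed as a callable; all names are illustrative, not the MultiBoost API).

    // Sketch: split raw example indices by the sign of the node classifier.
    #include <iostream>
    #include <set>
    #include <vector>

    template <typename Classifier>
    void splitBySign(Classifier phi, const std::vector<int>& rawIndices,
                     std::set<int>& idxPos, std::set<int>& idxNeg)
    {
        for (size_t i = 0; i < rawIndices.size(); ++i) {
            double phix = phi(rawIndices[i]);   // phi value of the classifier
            if (phix < 0)
                idxNeg.insert(rawIndices[i]);
            else if (phix > 0)
                idxPos.insert(rawIndices[i]);
            // phix == 0: routed to neither side, as in the code above
        }
    }

    double toyStump(int idx) { return idx % 2 == 0 ? 1.0 : -1.0; }

    int main()
    {
        std::vector<int> raw;
        for (int i = 0; i < 6; ++i) raw.push_back(i);
        std::set<int> pos, neg;
        splitBySign(toyStump, raw, pos, neg);
        std::cout << pos.size() << " positive, " << neg.size() << " negative\n";
        return 0;
    }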
Code example #5
File: TreeLearnerUCT.cpp Project: junjiek/cmu-exp
AlphaReal TreeLearnerUCT::run()
{
    if ( _numOfCalling == 0 ) {
        if (_verbose > 0) {
            cout << "Initializing tree..." << endl;
        }
        InnerNodeUCTSparse::setDepth( _numBaseLearners );
        InnerNodeUCTSparse::setBranchOrder( _pTrainingData->getNumAttributes() );
        _root.setChildrenNum();
        //createUCTTree();
    }
    _numOfCalling++;

    set< int > tmpIdx, idxPos, idxNeg;

    _pTrainingData->clearIndexSet();
    for( int i = 0; i < _pTrainingData->getNumExamples(); i++ ) tmpIdx.insert( i );


    vector< int > trajectory(0);
    _root.getBestTrajectory( trajectory );

    // for UCT

    for(int ib = 0; ib < _numBaseLearners; ++ib)
        _baseLearners[ib]->setTrainingData(_pTrainingData);

    AlphaReal edge = numeric_limits<AlphaReal>::max();

    BaseLearner* pPreviousBaseLearner = 0;
    //floatBaseLearner tmpPair, tmpPairPos, tmpPairNeg;

    // for storing the inner points (learners) which will be extended
    //vector< floatBaseLearner > bLearnerVector;
    InnerNodeType innerNode;
    priority_queue<InnerNodeType, deque<InnerNodeType>, greater_first<InnerNodeType> > pq;




    //train the first learner
    //_baseLearners[0]->run();
    pPreviousBaseLearner = _baseLearners[0]->copyState();
    dynamic_cast<FeaturewiseLearner*>(pPreviousBaseLearner)->run( trajectory[0] );

    //this contains the number of baselearners
    int ib = 0;

    NodePointUCT tmpNodePoint, nodeLeft, nodeRight;

    ////////////////////////////////////////////////////////
    //set the edge
    //tmpPair.first = getEdge( pPreviousBaseLearner, _pTrainingData );


    //tmpPair.second.first.first = pPreviousBaseLearner;
    // set the pointer of the parent
    //tmpPair.second.first.second.first = 0;
    // set that this is a neg child
    //tmpPair.second.first.second.second = 0;
    //tmpPair.second.second = tmpIdx;
    //bLearnerVector = calculateChildrenAndEnergies( tmpPair );


    ///
    pPreviousBaseLearner->setTrainingData( _pTrainingData );
    tmpNodePoint._edge = pPreviousBaseLearner->getEdge();
    tmpNodePoint._learner = pPreviousBaseLearner;
    tmpNodePoint._idx = 0;
    tmpNodePoint._depth = 0;
    tmpNodePoint._learnerIdxSet = tmpIdx;
    calculateChildrenAndEnergies( tmpNodePoint, trajectory[1] );

    ////////////////////////////////////////////////////////

    //insert the root into the priority queue

    if ( tmpNodePoint._extended )
    {
        if (_verbose > 2) {
            //cout << "Edges: (parent, pos, neg): " << bLearnerVector[0].first << " " << bLearnerVector[1].first << " " << bLearnerVector[2].first << endl << flush;
            //cout << "alpha[" << (ib) <<  "] = " << _alpha << endl << flush;
            cout << "Edges: (parent, pos, neg): " << tmpNodePoint._edge << " " << tmpNodePoint._leftEdge << " " << tmpNodePoint._rightEdge << endl << flush;
        }

        // if the energy is getting higher then we push it into the priority queue
        if ( tmpNodePoint._edge < ( tmpNodePoint._leftEdge + tmpNodePoint._rightEdge ) ) {
            float deltaEdge = abs(  tmpNodePoint._edge - ( tmpNodePoint._leftEdge + tmpNodePoint._rightEdge ) );
            innerNode.first = deltaEdge;
            innerNode.second = tmpNodePoint;
            pq.push( innerNode );
        } else {
            //delete bLearnerVector[0].second.first.first;
            delete tmpNodePoint._leftChild;
            delete tmpNodePoint._rightChild;
        }

    }

    if ( pq.empty() ) {
        // we don't extend the root
        BaseLearner* tmpBL = _baseLearners[0];
        _baseLearners[0] = tmpNodePoint._learner;
        delete tmpBL;
        ib = 1;
    }







    while ( ! pq.empty() && ( ib < _numBaseLearners ) ) {
        //get the best learner from the priority queue
        innerNode = pq.top();

        if (_verbose > 2) {
            cout << "Delta energy: " << innerNode.first << endl << flush;
            cout << "Size of priority queue: " << pq.size() << endl << flush;
        }

        pq.pop();
        tmpNodePoint = innerNode.second;

        //tmpPair = bLearnerVector[0];
        //tmpPairPos = bLearnerVector[1];
        //tmpPairNeg = bLearnerVector[2];

        nodeLeft._edge = tmpNodePoint._leftEdge;
        nodeLeft._learner = tmpNodePoint._leftChild;
        nodeLeft._learnerIdxSet = tmpNodePoint._leftChildIdxSet;


        nodeRight._edge = tmpNodePoint._rightEdge;
        nodeRight._learner = tmpNodePoint._rightChild;
        nodeRight._learnerIdxSet = tmpNodePoint._rightChildIdxSet;


        //store the baselearner if the delta energy will be higher
        if ( _verbose > 3 ) {
            cout << "Insert learner: " << ib << endl << flush;
        }
        int parentIdx = tmpNodePoint._parentIdx;
        BaseLearner* tmpBL = _baseLearners[ib];
        _baseLearners[ib] = tmpNodePoint._learner;
        delete tmpBL;

        nodeLeft._parentIdx = ib;
        nodeRight._parentIdx = ib;
        nodeLeft._leftOrRightChild = 0;
        nodeRight._leftOrRightChild = 1;

        nodeLeft._depth = tmpNodePoint._depth + 1;
        nodeRight._depth = tmpNodePoint._depth + 1;

        if ( ib > 0 ) {
            //set the descendant idx
            _idxPairs[ parentIdx ][ tmpNodePoint._leftOrRightChild ] = ib;
        }

        ib++;

        if ( ib >= _numBaseLearners ) break;



        //extend positive node
        if ( nodeLeft._learner ) {
            calculateChildrenAndEnergies( nodeLeft, trajectory[ nodeLeft._depth + 1 ] );
        } else {
            nodeLeft._extended = false;
        }


        //calculateChildrenAndEnergies( nodeLeft );

        // if the energy is getting higher then we push it into the priority queue
        if ( nodeLeft._extended )
        {
            if (_verbose > 2) {
                //cout << "Edges: (parent, pos, neg): " << bLearnerVector[0].first << " " << bLearnerVector[1].first << " " << bLearnerVector[2].first << endl << flush;
                //cout << "alpha[" << (ib) <<  "] = " << _alpha << endl << flush;
                cout << "Edges: (parent, pos, neg): " << nodeLeft._edge << " " << nodeLeft._leftEdge << " " << nodeLeft._rightEdge << endl << flush;
            }

            // if the energy is getting higher then we push it into the priority queue
            if ( nodeLeft._edge < ( nodeLeft._leftEdge + nodeLeft._rightEdge ) ) {
                float deltaEdge = abs(  nodeLeft._edge - ( nodeLeft._leftEdge + nodeLeft._rightEdge ) );
                innerNode.first = deltaEdge;
                innerNode.second = nodeLeft;
                pq.push( innerNode );
            } else {
                //delete bLearnerVector[0].second.first.first;
                delete nodeLeft._leftChild;
                delete nodeLeft._rightChild;

                if ( ib >= _numBaseLearners ) {
                    delete nodeLeft._learner;
                    break;
                } else {
                    //this will be a leaf, we do not extend further
                    int parentIdx = nodeLeft._parentIdx;
                    BaseLearner* tmpBL = _baseLearners[ib];
                    _baseLearners[ib] = nodeLeft._learner;
                    delete tmpBL;
                    _idxPairs[ parentIdx ][ 0 ] = ib;
                    ib += 1;
                }
            }
        }



        //extend negative node
        if ( nodeRight._learner ) {
            calculateChildrenAndEnergies( nodeRight, trajectory[ nodeRight._depth + 1 ] );
        } else {
            nodeRight._extended = false;
        }



        // if the energy is getting higher then we push it into the priority queue
        if ( nodeRight._extended )
        {
            if (_verbose > 2) {
                cout << "Edges: (parent, pos, neg): " << nodeRight._edge << " " << nodeRight._leftEdge << " " << nodeRight._rightEdge << endl << flush;
                //cout << "alpha[" << (ib) <<  "] = " << _alpha << endl << flush;
            }

            // if the energy is getting higher then we push it into the priority queue
            if ( nodeRight._edge < ( nodeRight._leftEdge + nodeRight._rightEdge ) ) {
                float deltaEdge = abs(  nodeRight._edge - ( nodeRight._leftEdge + nodeRight._rightEdge ) );
                innerNode.first = deltaEdge;
                innerNode.second = nodeRight;
                pq.push( innerNode );
            } else {
                //delete bLearnerVector[0].second.first.first;
                delete nodeRight._leftChild;
                delete nodeRight._rightChild;

                if ( ib >= _numBaseLearners ) {
                    delete nodeRight._learner;
                    break;
                } else {
                    //this will be a leaf, we do not extend further
                    int parentIdx = nodeRight._parentIdx;
                    BaseLearner* tmpBL = _baseLearners[ib];
                    _baseLearners[ib] = nodeRight._learner;
                    delete tmpBL;
                    _idxPairs[ parentIdx ][ 1 ] = ib;
                    ib += 1;
                }


            }
        }

    }


    for(int ib2 = ib; ib2 < _numBaseLearners; ++ib2) delete _baseLearners[ib2];
    _numBaseLearners = ib;

    if (_verbose > 2) {
        cout << "Num of learners: " << _numBaseLearners << endl << flush;
    }

    //clear the priority queue and free any learners still queued
    //(note: _numBaseLearners equals ib here, so the loop must not be
    //guarded by "ib < _numBaseLearners" or it would never run)
    while ( ! pq.empty() ) {
        //get the best learner from the priority queue
        innerNode = pq.top();
        pq.pop();
        tmpNodePoint = innerNode.second;

        delete tmpNodePoint._learner;
        delete tmpNodePoint._leftChild;
        delete tmpNodePoint._rightChild;
    }

    _id = _baseLearners[0]->getId();
    for(int ib = 1; ib < _numBaseLearners; ++ib)
        _id += "_x_" + _baseLearners[ib]->getId();

    //calculate alpha
    this->_alpha = 0.0;
    float eps_min = 0.0, eps_pls = 0.0;

    _pTrainingData->clearIndexSet();
    for( int i = 0; i < _pTrainingData->getNumExamples(); i++ ) {
        vector< Label> l = _pTrainingData->getLabels( i );
        for( vector< Label >::iterator it = l.begin(); it != l.end(); it++ ) {
            float result  = this->classify( _pTrainingData, i, it->idx );

            if ( ( result * it->y ) < 0 ) eps_min += it->weight;
            if ( ( result * it->y ) > 0 ) eps_pls += it->weight;
        }

    }



    //update the weights in the UCT tree
    double updateWeight = 0.0;
    if ( _updateRule == EDGE_SQUARE ) {
        double edge = getEdge();
        updateWeight = 1 - sqrt( 1 - ( edge * edge ) );
    } else if ( _updateRule == ALPHAS ) {
        double alpha = this->getAlpha();
        updateWeight = alpha;
    } else if ( _updateRule == ESQUARE ) {
        double edge = getEdge();
        updateWeight = edge * edge;
    }


    _root.updateInnerNodes( updateWeight, trajectory );

    if (_verbose > 2) {
        cout << "Update weight (" <<  updateWeight << ")" << "\tUpdate rule index: "<< _updateRule << endl << flush;
    }


    // set the smoothing value to avoid numerical problems
    // when theta=0.
    setSmoothingVal( (float)1.0 / (float)_pTrainingData->getNumExamples() * (float)0.01 );

    this->_alpha = getAlpha( eps_min, eps_pls );

    // calculate the energy (the sum of the energies of the leaves)
    float energy = this->getEnergy( eps_min, eps_pls );

    return energy;
}
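The loop above grows the tree best-first: a node is expanded only if the sum of its children's edges beats its own edge, and candidate nodes are kept in a priority queue keyed by that improvement. The sketch below isolates this queue discipline with toy types (ToyNode and the gain computation are illustrative, not the MultiBoost InnerNodeType).

    // Sketch: best-first node expansion driven by edge improvement.
    #include <iostream>
    #include <queue>
    #include <utility>
    #include <vector>

    struct ToyNode { double edge, leftEdge, rightEdge; };

    typedef std::pair<double, ToyNode> QItem;   // (gain, node)

    struct SmallerGain {
        bool operator()(const QItem& a, const QItem& b) const
        { return a.first < b.first; }           // largest gain on top
    };

    int main()
    {
        std::priority_queue<QItem, std::vector<QItem>, SmallerGain> pq;

        ToyNode root = { 0.30, 0.25, 0.15 };
        double gain = (root.leftEdge + root.rightEdge) - root.edge;
        if (gain > 0)                            // only queue improving splits
            pq.push(std::make_pair(gain, root));

        int budget = 4;                          // cf. _numBaseLearners
        while (!pq.empty() && budget > 0) {
            ToyNode best = pq.top().second;
            pq.pop();
            --budget;
            std::cout << "expanding node with edge " << best.edge << "\n";
            // ... here the children would be trained and queued in turn
        }
        return 0;
    }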
Code example #6
File: AdaBoostMHLearner.cpp Project: busarobi/MDDAG2
	// -------------------------------------------------------------------------
	void AdaBoostMHLearner::run( const nor_utils::Args& args, InputData* pTrainingData, const string baseLearnerName, const int numIterations, vector<BaseLearner*>& foundHypotheses )
	{
		
		// get the registered weak learner (type from name)
		BaseLearner* pWeakHypothesisSource = 
		BaseLearner::RegisteredLearners().getLearner(baseLearnerName);
		// initialize learning options; normally this is done in the strong loop.
		// We also do it here for Product learners, so that the input data can be created.
		pWeakHypothesisSource->initLearningOptions(args);
		
		BaseLearner* pConstantWeakHypothesisSource = 
		BaseLearner::RegisteredLearners().getLearner("ConstantLearner");
		
							
		if (_verbose == 1)
			cout << "Learning in progress..." << endl;
		
		
		///////////////////////////////////////////////////////////////////////
		// Starting the AdaBoost main loop
		///////////////////////////////////////////////////////////////////////
		for (int t = 0; t < numIterations; ++t)
		{
			if ((_verbose > 0)&&((t%100)==0))
				cout << "--------------[ Boosting iteration " << (t+1) << " ]--------------" << endl;				
			
			BaseLearner* pWeakHypothesis = pWeakHypothesisSource->create();
			pWeakHypothesis->initLearningOptions(args);
			//pTrainingData->clearIndexSet();
			
			pWeakHypothesis->setTrainingData(pTrainingData);
			
			AlphaReal energy = pWeakHypothesis->run();
			
			//float gamma = pWeakHypothesis->getEdge();
			//cout << gamma << endl;
			
			if ( (_withConstantLearner) || ( energy != energy ) ) // check constant learner if the user wants it (if energy is NaN, we choose the constant learner)
			{
				BaseLearner* pConstantWeakHypothesis = pConstantWeakHypothesisSource->create() ;
				pConstantWeakHypothesis->initLearningOptions(args);
				pConstantWeakHypothesis->setTrainingData(pTrainingData);
				AlphaReal constantEnergy = pConstantWeakHypothesis->run();
				
				if ( (constantEnergy <= energy) || ( energy != energy ) ) {
					delete pWeakHypothesis;
					pWeakHypothesis = pConstantWeakHypothesis;
				}
			}
			
			if (_verbose > 1)
				cout << "Weak learner: " << pWeakHypothesis->getName()<< endl;
			
			// Updates the weights and returns the edge
			AlphaReal gamma = updateWeights(pTrainingData, pWeakHypothesis);
			
			if (_verbose > 1)
			{
				cout << setprecision(5)
				<< "--> Alpha = " << pWeakHypothesis->getAlpha() << endl
				<< "--> Edge  = " << gamma << endl
				<< "--> Energy  = " << energy << endl
				//            << "--> ConstantEnergy  = " << constantEnergy << endl
				//            << "--> difference  = " << (energy - constantEnergy) << endl
				;
			}
			
			// If gamma <= theta the algorithm must stop.
			// If theta == 0 and gamma is 0, it means that the weak learner is no better than chance
			// and no further training is possible.
			if (gamma <= _theta)
			{
				if (_verbose > 0)
				{
					cout << "Can't train any further: edge = " << gamma 
					<< " (with and edge offset (theta)=" << _theta << ")" << endl;
				}
				
				//          delete pWeakHypothesis;
				//          break; 
			}
						
			// Add it to the internal list of weak hypotheses
			foundHypotheses.push_back(pWeakHypothesis); 
			
		}  // loop on iterations
		/////////////////////////////////////////////////////////
		
		if (_verbose > 0)
			cout << "--------------[ AdaBoost Learning completed. ]--------------" << endl;
	}
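The `energy != energy` test above is the classic self-comparison NaN check: NaN is the only floating-point value that compares unequal to itself, so a weak learner whose run() returns NaN is silently replaced by the constant learner. A tiny illustration:

    // Sketch: why "energy != energy" detects NaN.
    #include <cmath>
    #include <iostream>

    int main()
    {
        double energy = std::sqrt(-1.0);   // NaN: the weak learner failed
        if (energy != energy)
            std::cout << "energy is NaN -> fall back to the constant learner\n";
        return 0;
    }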
Code example #7
File: AdaBoostMHLearner.cpp Project: busarobi/MDDAG2
	void AdaBoostMHLearner::run(const nor_utils::Args& args)
	{
		// load the arguments
		this->getArgs(args);

		// get the registered weak learner (type from name)
		BaseLearner* pWeakHypothesisSource = 
			BaseLearner::RegisteredLearners().getLearner(_baseLearnerName);
		// initialize learning options; normally this is done in the strong loop.
		// We also do it here for Product learners, so that the input data can be created.
		pWeakHypothesisSource->initLearningOptions(args);

		BaseLearner* pConstantWeakHypothesisSource = 
			BaseLearner::RegisteredLearners().getLearner("ConstantLearner");

		// get the training input data, and load it

		InputData* pTrainingData = pWeakHypothesisSource->createInputData();
		pTrainingData->initOptions(args);
		pTrainingData->load(_trainFileName, IT_TRAIN, _verbose);
		
		// get the testing input data, and load it
		InputData* pTestData = NULL;
		if ( !_testFileName.empty() )
		{
			pTestData = pWeakHypothesisSource->createInputData();
			pTestData->initOptions(args);
			pTestData->load(_testFileName, IT_TEST, _verbose);
		}

		// The output information object
		OutputInfo* pOutInfo = NULL;


		if ( !_outputInfoFile.empty() ) 
		{
			// Baseline: constant classifier - goes into 0th iteration

			BaseLearner* pConstantWeakHypothesis = pConstantWeakHypothesisSource->create() ;
			pConstantWeakHypothesis->initLearningOptions(args);
			pConstantWeakHypothesis->setTrainingData(pTrainingData);
			AlphaReal constantEnergy = pConstantWeakHypothesis->run();

			//pOutInfo = new OutputInfo(_outputInfoFile);
            pOutInfo = new OutputInfo(args);
			pOutInfo->initialize(pTrainingData);

			if (pTestData)
				pOutInfo->initialize(pTestData);
			pOutInfo->outputHeader(pTrainingData->getClassMap());

			pOutInfo->outputIteration(-1);
            pOutInfo->outputCustom(pTrainingData, pConstantWeakHypothesis);
            
			if (pTestData != NULL)
            {
                pOutInfo->separator();
                pOutInfo->outputCustom(pTestData, pConstantWeakHypothesis);   
            }

			pOutInfo->outputCurrentTime();

			pOutInfo->endLine(); 
			pOutInfo->initialize(pTrainingData);

			if (pTestData)
				pOutInfo->initialize(pTestData);
		}
		//cout << "Before serialization" << endl;
		// reload the previously found weak learners if -resume is set;
		// otherwise this just returns 0
		int startingIteration = resumeWeakLearners(pTrainingData);


		Serialization ss(_shypFileName, _isShypCompressed );
		ss.writeHeader(_baseLearnerName); // this must go after resumeProcess has been called

		// perform the resuming if necessary; if not, this just returns
		resumeProcess(ss, pTrainingData, pTestData, pOutInfo);

		if (_verbose == 1)
			cout << "Learning in progress..." << endl;

		// the start time is taken here, although loading a saved model may itself take a very long time
		time_t startTime, currentTime;
		time(&startTime);

		///////////////////////////////////////////////////////////////////////
		// Starting the AdaBoost main loop
		///////////////////////////////////////////////////////////////////////
		for (int t = startingIteration; t < _numIterations; ++t)
		{
			if (_verbose > 1)
				cout << "------- WORKING ON ITERATION " << (t+1) << " -------" << endl;

			BaseLearner* pWeakHypothesis = pWeakHypothesisSource->create();
			pWeakHypothesis->initLearningOptions(args);
			//pTrainingData->clearIndexSet();

			pWeakHypothesis->setTrainingData(pTrainingData);
			
			AlphaReal energy = pWeakHypothesis->run();
			
			//float gamma = pWeakHypothesis->getEdge();
			//cout << gamma << endl;

			if ( (_withConstantLearner) || ( energy != energy ) ) // check constant learner if the user wants it (if energy is NaN, we choose the constant learner)
			{
				BaseLearner* pConstantWeakHypothesis = pConstantWeakHypothesisSource->create() ;
				pConstantWeakHypothesis->initLearningOptions(args);
				pConstantWeakHypothesis->setTrainingData(pTrainingData);
				AlphaReal constantEnergy = pConstantWeakHypothesis->run();

				if ( (constantEnergy <= energy) || ( energy != energy ) ) {
					delete pWeakHypothesis;
					pWeakHypothesis = pConstantWeakHypothesis;
				}
			}

			if (_verbose > 1)
				cout << "Weak learner: " << pWeakHypothesis->getName()<< endl;
			// Output the step-by-step information
			printOutputInfo(pOutInfo, t, pTrainingData, pTestData, pWeakHypothesis);

			// Updates the weights and returns the edge
			AlphaReal gamma = updateWeights(pTrainingData, pWeakHypothesis);

			if (_verbose > 1)
			{
				cout << setprecision(5)
					<< "--> Alpha = " << pWeakHypothesis->getAlpha() << endl
					<< "--> Edge  = " << gamma << endl
					<< "--> Energy  = " << energy << endl
					//            << "--> ConstantEnergy  = " << constantEnergy << endl
					//            << "--> difference  = " << (energy - constantEnergy) << endl
					;
			}

			// If gamma <= theta the algorithm must stop.
			// If theta == 0 and gamma is 0, it means that the weak learner is no better than chance
			// and no further training is possible.
			if (gamma <= _theta)
			{
				if (_verbose > 0)
				{
					cout << "Can't train any further: edge = " << gamma 
						<< " (with and edge offset (theta)=" << _theta << ")" << endl;
				}

				//          delete pWeakHypothesis;
				//          break; 
			}

			// append the current weak learner to strong hypothesis file,
			// that is, serialize it.
			ss.appendHypothesis(t, pWeakHypothesis);

			// Add it to the internal list of weak hypotheses
			_foundHypotheses.push_back(pWeakHypothesis); 

			// check if the time limit has been reached
			if (_maxTime > 0)
			{
				time( &currentTime );
				float diff = difftime(currentTime, startTime); // difftime is in seconds
				diff /= 60; // = minutes

				if (diff > _maxTime)
				{
					if (_verbose > 0)
						cout << "Time limit of " << _maxTime 
						<< " minutes has been reached!" << endl;
					break;     
				}
			} // check for maxtime
			delete pWeakHypothesis;
		}  // loop on iterations
		/////////////////////////////////////////////////////////

		// write the footer of the strong hypothesis file
		ss.writeFooter();

		// write the weights of the instances if the name of weights file isn't empty
		printOutWeights( pTrainingData );


		// Free the two input data objects
		if (pTrainingData)
			delete pTrainingData;
		if (pTestData)
			delete pTestData;

		if (pOutInfo)
			delete pOutInfo;

		if (_verbose > 0)
			cout << "Learning completed." << endl;
	}
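This full run() wires the same iteration into serialization, resuming, and the theta-based stopping test; note the stop itself is currently commented out, so training continues even when gamma <= _theta. A sketch of that criterion in isolation (shouldStop is an illustrative name, not from the project):

    // Sketch: the edge-threshold stopping rule. With theta == 0 this reduces
    // to "the weak learner is no better than chance".
    #include <iostream>

    bool shouldStop(double gamma, double theta)
    {
        return gamma <= theta;   // cf. "if (gamma <= _theta)" above
    }

    int main()
    {
        std::cout << std::boolalpha
                  << shouldStop(0.02, 0.0) << "\n"   // false: still an edge, keep boosting
                  << shouldStop(0.0,  0.0) << "\n";  // true: edge exhausted, stop
        return 0;
    }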
Code example #8
File: BanditProductLearner.cpp Project: ShenWei/src
	float BanditProductLearner::run()
	{
		if ( ! this->_banditAlgo->isInitialized() ) {
			init();
		}
		// the bandit algorithm selects the subset the tree learner is allowed to use
		// the armindexes will be stored in _armsForPulling
		getArms();

		const int numClasses = _pTrainingData->getNumClasses();
		const int numExamples = _pTrainingData->getNumExamples();

		// Backup original labels
		for (int i = 0; i < numExamples; ++i) {
			const vector<Label>& labels = _pTrainingData->getLabels(i);
			vector<char> exampleLabels;
			for (int l = 0; l < numClasses; ++l)
				exampleLabels.push_back(labels[l].y);
			_savedLabels.push_back(exampleLabels);
		}

		for(int ib = 0; ib < _numBaseLearners; ++ib)
			_baseLearners[ib]->setTrainingData(_pTrainingData);

		float energy = numeric_limits<float>::max();
		float previousEnergy, hx, previousAlpha;
		BaseLearner* pPreviousBaseLearner = 0;

		bool firstLoop = true;
		int ib = -1;
		while (1) {
			ib += 1;
			if (ib >= _numBaseLearners) {
				ib = 0;
				firstLoop = false;
			}
			previousEnergy = energy;
			previousAlpha = _alpha;
			if (pPreviousBaseLearner)
				delete pPreviousBaseLearner;
			if ( !firstLoop ) {
				// take the old learner off the labels
				for (int i = 0; i < numExamples; ++i) {
					vector<Label>& labels = _pTrainingData->getLabels(i);
					for (int l = 0; l < numClasses; ++l) {
						// Here we could optionally use the confidence-rated setting, i.e. the
						// real-valued output of classify instead of its sign
						hx = _baseLearners[ib]->classify(_pTrainingData,i,l);
						if ( hx < 0 )
							labels[l].y *= -1;
						else if ( hx == 0 ) { // have to redo the multiplications, haven't been tested
							for(int ib1 = 0; ib1 < _numBaseLearners && labels[l].y != 0; ++ib1) {
								if (ib != ib1) {
									hx = _baseLearners[ib1]->classify(_pTrainingData,i,l);
									if (hx < 0)
										labels[l].y *= -1;
									else if (hx == 0)
										labels[l].y = 0;
								}
							}
						}
					}
				}
			}
			pPreviousBaseLearner = _baseLearners[ib]->copyState();
			energy = dynamic_cast< FeaturewiseLearner* >(_baseLearners[ib])->run(_armsForPulling );
			// check if it is NaN (NaN is the only value that compares unequal to itself)
			if ( energy != energy )
			{
				if (_verbose > 2) {
					cout << "Cannot find weak hypothesis, constant learner is used!!" << endl;
				}
				BaseLearner* pConstantWeakHypothesisSource = 
					BaseLearner::RegisteredLearners().getLearner("ConstantLearner");
				BaseLearner* pConstantWeakHypothesis = pConstantWeakHypothesisSource->create() ;
				pConstantWeakHypothesis->setTrainingData( _pTrainingData );
				energy = pConstantWeakHypothesis->run();
				
				delete _baseLearners[ib];
				_baseLearners[ib] = pConstantWeakHypothesis;
				
			}
			_alpha = _baseLearners[ib]->getAlpha();
			if (_verbose > 2) {
				cout << "E[" << (ib+1) <<  "] = " << energy << endl << flush;
				cout << "alpha[" << (ib+1) <<  "] = " << _alpha << endl << flush;
			}
			for (int i = 0; i < numExamples; ++i) {
				vector<Label>& labels = _pTrainingData->getLabels(i);
				for (int l = 0; l < numClasses; ++l) {
					// Here we could optionally use the confidence-rated setting, i.e. the
					// real-valued output of classify instead of its sign
					if (labels[l].y != 0) { // perhaps replace it by nor_utils::is_zero(labels[l].y)
						hx = _baseLearners[ib]->classify(_pTrainingData,i,l);
						if ( hx < 0 )
							labels[l].y *= -1;
						else if ( hx == 0 )
							labels[l].y = 0;
					}
				}
			}

			// We have to do at least one full iteration; in practice this is not guaranteed.
			// Alternatively we could initialize all of the base learners to the constant learner.
			//      if ( !firstLoop && energy >= previousEnergy ) {
			//	 if (energy > previousEnergy) {
			//	    _baseLearners[ib] = pPreviousBaseLearner->copyState();
			//           delete pPreviousBaseLearner;
			//	    energy = previousEnergy;
			//	    _alpha = _baseLearners[ib]->getAlpha();
			//	 }
			//	 break;
			//      }
			if ( energy >= previousEnergy ) {
				_alpha = previousAlpha;
				energy = previousEnergy;
				if (firstLoop) {
					for(int ib2 = ib; ib2 < _numBaseLearners; ++ib2)
						delete _baseLearners[ib2];
					_numBaseLearners = ib;
				}
				else {
					_baseLearners[ib] = pPreviousBaseLearner->copyState();
				}
				delete pPreviousBaseLearner;
				break;
			} 
		}

		// Restore original labels
		for (int i = 0; i < numExamples; ++i) {
			vector<Label>& labels = _pTrainingData->getLabels(i);
			for (int l = 0; l < numClasses; ++l)
				labels[l].y = _savedLabels[i][l];
		}

		_id = _baseLearners[0]->getId();
		for(int ib = 1; ib < _numBaseLearners; ++ib)
			_id += "_x_" + _baseLearners[ib]->getId();

		//bandit part: we calculate the reward
		_reward = getRewardFromEdge( getEdge() );
		provideRewardForBanditAlgo();

		return energy;
	}
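The loops above implement the product learner's label "flipping": before retraining factor ib, the labels are multiplied by the signs of the other factors' outputs, so each factor fits the residual of the product. The toy sketch below reproduces just that sign arithmetic; the scalars and names are illustrative, not the project's Label structure.

    // Sketch: a label as seen by one factor of a product learner is the
    // original label times the signs of the other factors' outputs
    // (cf. "labels[l].y *= -1" when hx < 0 above).
    #include <iostream>
    #include <vector>

    int signOf(double v) { return v > 0 ? 1 : (v < 0 ? -1 : 0); }

    int main()
    {
        double y = 1.0;                       // original label
        std::vector<double> factorOutputs;    // outputs h_b(x) of the other factors
        factorOutputs.push_back(0.7);
        factorOutputs.push_back(-0.2);

        int flipped = signOf(y);
        for (size_t b = 0; b < factorOutputs.size(); ++b)
            flipped *= signOf(factorOutputs[b]);

        std::cout << "label seen by the trained factor: " << flipped << "\n";  // -1
        return 0;
    }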