Ejemplo n.º 1
0
void RateMeyerHaeseler::runIterativeProc(Params &params, IQTree &tree) {
	// Iteratively optimize site-specific rates and branch lengths until the
	// tree log-likelihood stops improving by more than 1e-4 (or 98 rounds run).
	// NOTE(review): the tree's previous rate model is replaced by this object
	// and is NOT restored on exit — restoration is deliberately disabled
	// (see the commented-out setRate(backup_rate) in the project history).
	if (verbose_mode >= VB_MED) {
		// Touch a marker file named "x"; kept for parity with existing debug aid.
		ofstream out("x");
		out.close();
	}
	setTree(&tree);
	RateHeterogeneity *backup_rate = tree.getRate();
	if (backup_rate->getGammaShape() > 0 ) {
		// Seed this rate vector from the Gamma model's per-pattern rates.
		IntVector pattern_cat;
		backup_rate->computePatternRates(*this, pattern_cat);
		// Rates should average to 1.0 over all sites; renormalize if they drift.
		// Use an unsigned index here: size() is a container size, and the
		// signed loop counter is reserved for the optimization rounds below.
		double sum = 0.0;
		for (size_t p = 0; p < size(); p++)
			sum += at(p) * phylo_tree->aln->at(p).frequency;
		sum /= phylo_tree->aln->getNSite();
		if (fabs(sum - 1.0) > 0.0001) {
			if (verbose_mode >= VB_MED)
				cout << "Normalizing Gamma rates (" << sum << ")" << endl;
			for (size_t p = 0; p < size(); p++)
				at(p) /= sum;
		}
	}
	// Install this object as the tree's site-rate model.
	tree.getModelFactory()->site_rate = this;
	tree.setRate(this);

	double prev_lh = tree.getCurScore();
	// Caching transition matrices is counter-productive while rates change
	// every round, so disable it for the duration of the optimization.
	tree.getModelFactory()->stopStoringTransMatrix();

	// i is printed after the loop as the number of completed rounds (i-1),
	// so it must be declared outside the loop.
	int i;
	for (i = 2; i < 100; i++) {
		tree.setCurScore(optimizeParameters(0.0));
		tree.setCurScore(tree.optimizeAllBranches(i));
		cout << "Current Log-likelihood: " << tree.getCurScore() << endl;
		// Converged: improvement dropped below 1e-4 log-likelihood units.
		if (tree.getCurScore() <= prev_lh + 1e-4) {
			break;
		}
		prev_lh = tree.getCurScore();
	}
	cout << "Optimization took " << i-1 << " rounds to finish" << endl;
	tree.getModelFactory()->startStoringTransMatrix();
}
Ejemplo n.º 2
0
/********************************************************************************************
optimizations
*********************************************************************************************/
void gainLossOptimizer::optimizations(){
	time_t t1;
	time(&t1);
	time_t t2;

	LOGnOUT(4,<<"-------------------------------"<<endl
		<<"Starting optimizations: maxNumIterations="<<_maxNumOfIterations<<endl);	

	_bestL = likelihoodComputation::getTreeLikelihoodAllPosAlphTheSame(_tr,_sc,*_sp,_weightsUniqPatterns,_unObservableData_p);
	MDOUBLE currBestL=VERYSMALL;
	MDOUBLE previousL;
	bool noLikeImprovmentAtBBL = false; // if BBL did not produce a new tree, end. (no point in another iteration model+BBL)

	bool isSkipParamsOptimization = gainLossOptions::_isSkipFirstParamsOptimization;
	LOGnOUT(3,<<endl<<"#########"<<" optimization starting epsilonCycle="<<_epsilonOptimization<<" maxNumIterations="<<_maxNumOfIterations<<endl);
	LOGnOUT(3,<<"start optimization with L= "<<_bestL<<endl);
	int iter;
	for (iter=1;iter<=_maxNumOfIterations;iter++){
		LOGnOUT(4,<<endl<<"------"<<" Model+BBL iter="<<iter<<endl);
		previousL = _bestL;		// breaking out of loop when no (>epsilon) improvement is made by comparing to previousL
// model optimization
		if(!isSkipParamsOptimization){
			currBestL = optimizeParameters();}
		else{
			LOGnOUT(4,<<"Optimize Params - Skipped"<<endl);}
		
		if (currBestL>_bestL) {
			_bestL = currBestL;
		}
		else if(!isSkipParamsOptimization && currBestL<_bestL){
			LOGnOUT(4,<<" !!! Warning !!!: after model optimization likelihood went down"<< currBestL<<" "<<_bestL<<endl);
		}		
		//_bestL = max(currBestL,_bestL);
		
		isSkipParamsOptimization = false;	// only first iteration skipped
// BBL optimization
		if (gainLossOptions::_performOptimizationsBBL && _performOptimizationsBBL) // we use the && 2 enable optimizationsManyStarts not to perform BBL
		{
			//LOGnOUT(4,<<"Start BBL... with epsilonOptimizationBBL= "<<_epsilonOptimizationBBL<<endl);
			if(gainLossOptions::_performOptimizationsBBLOnlyOnce)	// Next iteration - no BBL
				_performOptimizationsBBL = false;
			currBestL = optimizeBranchLengths(iter);
			if (currBestL>_bestL) {
				_bestL = currBestL;
			}
			else{
				noLikeImprovmentAtBBL = true;
				LOGnOUT(4,<<" !!! Warning !!!: after BBL likelihood did not improve"<< currBestL<<" "<<_bestL<<endl);
			}			
			//_bestL = max(currBestL,_bestL);
			string treeINodes = gainLossOptions::_outDir + "//" + "TheTree.INodes.iter"+ int2string(iter) + ".ph"; 
			printTree (_tr, treeINodes);
		}

// ROOT optimization
		if (gainLossOptions::_performOptimizationsROOT) 
		{
			currBestL = optimizeRoot();
			if (currBestL>_bestL) {
				_bestL = currBestL;
			}
			else{
				LOGnOUT(4,<<" !!! Warning !!!: after Root likelihood did not improve"<< currBestL<<" "<<_bestL<<endl);
			}			
			//_bestL = max(currBestL,_bestL);
		}		
		if ( (_bestL-previousL) < max(_epsilonOptimization, abs(_bestL/10000))  || noLikeImprovmentAtBBL) // stop Opt for less than epsilon likelihood point
		{
			LOGnOUT(3,<<" OverAll optimization converged. Iter= "<<iter<<" Likelihood="<<_bestL<<endl);
			break;
		}