Example #1
optimizeSelectonParameters::optimizeSelectonParameters(tree& et, // find best parameters and, optionally, best branch lengths (BBL)
					   const sequenceContainer& sc,
					   vector<stochasticProcess>& spVec,
					   distribution * distr,
					   bool bblFlag,
					   bool isGamma, bool isBetaProbSet,bool isOmegaSet,
					   bool isKappaSet, bool isAlphaSet, bool isBetaSet,
					   const MDOUBLE upperBoundOnAlpha,
					   const MDOUBLE upperBoundOnBeta,
					   const MDOUBLE epsilonAlphaOptimization,
					   const MDOUBLE epsilonKOptimization,
					   const MDOUBLE epsilonLikelihoodImprovment,
					   const int maxBBLIterations,
					   const int maxTotalIterations){
	// initialization
	MDOUBLE lowerValueOfParamK = 0;
	MDOUBLE lowerValueOfParamAlpha = 0.1;
	MDOUBLE lowerValueOfParamBeta = 0.1;
	MDOUBLE omegaLowerBoundary = 0.99; // this is to allow brent to reach the exact lower bound value
	MDOUBLE omegaUpperBoundary = 5.0; 
	MDOUBLE upperValueOfParamK = 5; // changed from 50, Adi S. 2/1/07 
	
	MDOUBLE initialGuessValueOfParamTr;
	initialGuessValueOfParamTr = _bestK = static_cast<wYangModel*>(spVec[0].getPijAccelerator()->getReplacementModel())->getK();

	MDOUBLE initialGuessValueOfParamAlpha;
	if (isGamma) initialGuessValueOfParamAlpha = _bestAlpha = static_cast<generalGammaDistribution*>(distr)->getAlpha();
	else initialGuessValueOfParamAlpha = _bestAlpha = static_cast<betaOmegaDistribution*>(distr)->getAlpha();
	
	MDOUBLE initialGuessValueOfParamBeta; 
	if (isGamma) initialGuessValueOfParamBeta = _bestBeta = static_cast<generalGammaDistribution*>(distr)->getBeta();
	else initialGuessValueOfParamBeta = _bestBeta = static_cast<betaOmegaDistribution*>(distr)->getBeta();

	MDOUBLE initialGuessValueOfParamOmega = -1;
	MDOUBLE initialGuessValueOfParamBetaProb = -1;
	if (!isGamma) {
		initialGuessValueOfParamOmega = _bestOmega = static_cast<betaOmegaDistribution*>(distr)->getOmega();
		initialGuessValueOfParamBetaProb = _bestBetaProb = static_cast<betaOmegaDistribution*>(distr)->getBetaProb();
	}
	_bestL = likelihoodComputation2Codon::getTreeLikelihoodAllPosAlphTheSame(et,sc,spVec,distr);
	MDOUBLE newL = _bestL;

	MDOUBLE alphaFound = 0;
	MDOUBLE kFound = 0;
	MDOUBLE betaFound = 0;
	MDOUBLE omegaFound = 0;
	MDOUBLE betaProbFound = 0;
	bool changed = false;
	int i=0;
	LOG(5,<<endl<<"Beginning optimization of parameters"<<endl<<endl);

	for (i=0; i < maxTotalIterations; ++i) {
		LOG(5,<<"Iteration Number= " << i <<endl);
		LOG(5,<<"---------------------"<<endl);		
		cout<<"Iteration number = "<< i <<endl;
		alphaFound = omegaFound = betaProbFound = kFound = betaFound=0;
		changed = false;
//ALPHA (beta or gamma distribution parameter)
		if (!isAlphaSet){
			if (isGamma) initialGuessValueOfParamAlpha = static_cast<generalGammaDistribution*>(distr)->getAlpha();
			else initialGuessValueOfParamAlpha = static_cast<betaOmegaDistribution*>(distr)->getAlpha();
			newL = -brent(lowerValueOfParamAlpha,
						initialGuessValueOfParamAlpha,
						upperBoundOnAlpha,
						evalParam(et,sc,spVec,-1,distr,isGamma),epsilonAlphaOptimization,&alphaFound); 

			LOG(5,<<"current best L= "<<_bestL<<endl<<endl);
			LOG(5,<<"new L After alpha= " << newL<<endl);
			LOG(5,<<"new alpha = " <<alphaFound<<endl<<endl);

			
			if (newL > _bestL+epsilonLikelihoodImprovment) { // improvement: update likelihood, distribution, and model.
				_bestL = newL;
				_bestAlpha = alphaFound;
				if (isGamma) static_cast<generalGammaDistribution*>(distr)->setAlpha(alphaFound);
				else static_cast<betaOmegaDistribution*>(distr)->setAlpha(alphaFound);
				for (int categor = 0; categor < spVec.size();categor++)
					static_cast<wYangModel*>(spVec[categor].getPijAccelerator()->getReplacementModel())->setW(distr->rates(categor)); 
				normalizeMatrices(spVec,distr);
				changed = true;
			} 
		}
//BETA (beta distribution parameter)
		if (!isBetaSet) {
			if (isGamma) initialGuessValueOfParamBeta = static_cast<generalGammaDistribution*>(distr)->getBeta();
			else initialGuessValueOfParamBeta = static_cast<betaOmegaDistribution*>(distr)->getBeta();
			newL = -brent(lowerValueOfParamBeta,
						initialGuessValueOfParamBeta,
						upperBoundOnBeta,
						evalParam(et,sc,spVec,-2,distr,isGamma),epsilonAlphaOptimization,&betaFound); 

			LOG(5,<<"current best L= "<<_bestL<<endl<<endl);
			LOG(5,<<"new L After beta= " << newL<<endl);
			LOG(5,<<"new beta = " <<betaFound<<endl<<endl);
		
			if (newL > _bestL+epsilonLikelihoodImprovment) { // improvement: update likelihood, distribution, and model.
				_bestL = newL;
				_bestBeta = betaFound;
				if (isGamma) static_cast<generalGammaDistribution*>(distr)->setBeta(betaFound);
				else static_cast<betaOmegaDistribution*>(distr)->setBeta(betaFound);
				for (int categor = 0; categor < spVec.size();categor++)
					static_cast<wYangModel*>(spVec[categor].getPijAccelerator()->getReplacementModel())->setW(distr->rates(categor)); 		
				normalizeMatrices(spVec,distr);
				changed = true;
			}
		}
//K parameter
		if (!isKappaSet){
			initialGuessValueOfParamTr =  static_cast<wYangModel*>(spVec[0].getPijAccelerator()->getReplacementModel())->getK();
			newL = -brent(lowerValueOfParamK,   // optimize kappa (Tr)
					initialGuessValueOfParamTr,
					upperValueOfParamK,
					evalParam(et,sc,spVec,0,distr,isGamma),epsilonKOptimization,&kFound); 
			
			LOG(5,<<"current best L= "<<_bestL<<endl<<endl);
			LOG(5,<<"new L After kappa= " << newL<<endl);
			LOG(5,<<"new kappa = " <<kFound<<endl);

			if (newL > _bestL+epsilonLikelihoodImprovment) { // improvement: update likelihood and model.
				_bestL = newL;
				_bestK = kFound;
				for (int categor = 0; categor < spVec.size();categor++)
					static_cast<wYangModel*>(spVec[categor].getPijAccelerator()->getReplacementModel())->setK(kFound); 
				normalizeMatrices(spVec,distr);
				changed = true;
			}
		}
//beta distribution part (betaProb and additional omega)
		if (!isGamma && !isBetaProbSet){ // optimize beta probabilities
			if (!isOmegaSet){ // optimize omega  (M8 or M8b)
				MDOUBLE omegaFound;
				newL = -brent(omegaLowerBoundary, 
						initialGuessValueOfParamOmega,
						omegaUpperBoundary,
						evalParam(et,sc,spVec,1,distr,isGamma),0.01,&omegaFound); 

				LOG(5,<<"current best L= "<<_bestL<<endl<<endl);
				LOG(5,<<"new L After additional omega caetgory = " << newL<<endl);
				LOG(5,<<"new additional omega caetgory = " <<omegaFound<<endl<<endl);
	
				if (newL > _bestL+epsilonLikelihoodImprovment ) {
					_bestL = newL;
					_bestOmega = omegaFound;
					static_cast<betaOmegaDistribution*>(distr)->setOmega(omegaFound);
					static_cast<wYangModel*>(spVec[spVec.size()-1].getPijAccelerator()->getReplacementModel())->setW(omegaFound); 	
					normalizeMatrices(spVec,distr);
					changed = true;
				}
			}
			MDOUBLE betaProbFound;	
			newL = -brent(0.0,initialGuessValueOfParamBetaProb,1.0,
					evalParam(et,sc,spVec,2,distr,isGamma),0.01,&betaProbFound); 

			LOG(5,<<"current best L= "<<_bestL<<endl<<endl);
			LOG(5,<<"new L After prob(additional omega caetgory)= " << newL<<endl);
			LOG(5,<<"new prob(additional omega caetgory)= " <<1 - betaProbFound<<endl<<endl);
			if (newL > _bestL+epsilonLikelihoodImprovment) { // improvement: update likelihood, distribution, and model.
				_bestL = newL;
				_bestBetaProb = betaProbFound;
				static_cast<betaOmegaDistribution*>(distr)->setBetaProb(betaProbFound);
				normalizeMatrices(spVec,distr);
				changed = true;
			}
		}
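
The excerpt above is a coordinate-ascent loop: each free parameter (alpha, beta, kappa, and, for the beta+omega model, the extra omega category and its probability) gets its own one-dimensional Brent search, and a candidate value is kept only when it raises the log-likelihood by more than epsilonLikelihoodImprovment. Below is a minimal, self-contained sketch of that accept-if-improved pattern; it is an illustration only, where goldenSection() stands in for the library's brent() and the quadratic logLik() is a toy objective, not the codon-model likelihood.

#include <cmath>
#include <cstdio>
#include <vector>

// Simple 1-D maximizer on [lo, hi] for a unimodal f (golden-section search);
// a hypothetical stand-in for the brent() routine used in the code above.
template <class F>
double goldenSection(double lo, double hi, F f, double eps, double* found) {
    const double phi = (std::sqrt(5.0) - 1.0) / 2.0;
    double a = lo, b = hi;
    while (b - a > eps) {
        double x1 = b - phi * (b - a);
        double x2 = a + phi * (b - a);
        if (f(x1) < f(x2)) a = x1; else b = x2;
    }
    *found = 0.5 * (a + b);
    return f(*found);
}

int main() {
    // Toy "log-likelihood": peaked at alpha=1.2, beta=0.7, kappa=2.5.
    std::vector<double> p = {0.5, 0.5, 1.0};             // current alpha, beta, kappa
    const double target[3] = {1.2, 0.7, 2.5};
    auto logLik = [&](const std::vector<double>& v) {
        double s = 0.0;
        for (int i = 0; i < 3; ++i) s -= (v[i] - target[i]) * (v[i] - target[i]);
        return s;
    };
    const double lo[3] = {0.1, 0.1, 0.0}, hi[3] = {10.0, 10.0, 5.0};
    const double epsImprove = 1e-6;

    double bestL = logLik(p);
    for (int it = 0; it < 50; ++it) {
        bool changed = false;
        for (int i = 0; i < 3; ++i) {                    // one line search per parameter
            double found = p[i];
            double newL = goldenSection(lo[i], hi[i],
                [&](double x) { std::vector<double> q = p; q[i] = x; return logLik(q); },
                1e-4, &found);
            if (newL > bestL + epsImprove) {             // accept only a genuine improvement
                bestL = newL;
                p[i] = found;
                changed = true;
            }
        }
        if (!changed) break;                             // no parameter moved: converged
    }
    std::printf("alpha=%.3f beta=%.3f kappa=%.3f logL=%.6f\n", p[0], p[1], p[2], bestL);
    return 0;
}

The epsilon-guarded acceptance is the key design choice in both the sketch and the original loop: it prevents the search from churning on numerically insignificant changes and gives a natural stopping rule once no parameter moves.
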
Example #2
Variant ObjectMethodExpression::eval(VariableEnvironment &env) const {
    String name(m_name->get(env));
    Variant obj(m_obj->eval(env));
    if (!obj.is(KindOfObject)) {
        raise_error("Call to a member function %s() on a non-object",
                    name.c_str());
    }
#ifdef ENABLE_LATE_STATIC_BINDING
    EvalFrameInjection::EvalStaticClassNameHelper helper(obj.toObject());
#endif
    Variant cobj(env.currentObject());
    const MethodStatement *ms = NULL;
    if (cobj.is(KindOfObject) && obj.getObjectData() == cobj.getObjectData()) {
        // Have to try current class first for private method
        const ClassStatement *cls = env.currentClassStatement();
        if (cls) {
            const MethodStatement *ccms = cls->findMethod(name.c_str());
            if (ccms && (ccms->getModifiers() & ClassStatement::Private)) {
                ms = ccms;
            }
        }
    }
    if (!ms) {
        ms = obj.getObjectData()->getMethodStatement(name.data());
    }
    SET_LINE;
    if (ms) {
        return strongBind(ms->invokeInstanceDirect(toObject(obj), env, this));
    }

    // Handle builtins
    MethodCallPackage mcp1;
    mcp1.methodCall(obj, name, -1);
    const CallInfo* ci = mcp1.ci;
    // If the lookup failed, methodCall() must have thrown an exception,
    // so if we reach here ci must not be NULL.
    ASSERT(ci);
    unsigned int count = m_params.size();
    if (count <= 6) {
        CVarRef a0 = (count > 0) ? evalParam(env, ci, 0) : null;
        CVarRef a1 = (count > 1) ? evalParam(env, ci, 1) : null;
        CVarRef a2 = (count > 2) ? evalParam(env, ci, 2) : null;
        CVarRef a3 = (count > 3) ? evalParam(env, ci, 3) : null;
        CVarRef a4 = (count > 4) ? evalParam(env, ci, 4) : null;
        CVarRef a5 = (count > 5) ? evalParam(env, ci, 5) : null;
        return
            strongBind((ci->getMethFewArgs())(mcp1, count, a0, a1, a2, a3, a4, a5));
    }
    if (RuntimeOption::UseArgArray) {
        ArgArray *args = prepareArgArray(env, ci, count);
        return strongBind((ci->getMeth())(mcp1, args));
    }
    ArrayInit ai(count);
    for (unsigned int i = 0; i < count; ++i) {
        if (ci->mustBeRef(i)) {
            ai.setRef(m_params[i]->refval(env));
        } else if (ci->isRef(i)) {
            ai.setRef(m_params[i]->refval(env, 0));
        } else {
            ai.set(m_params[i]->eval(env));
        }
    }
    return strongBind((ci->getMeth())(mcp1, Array(ai.create())));
}
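
The tail of eval() shows a common calling-convention split: after method lookup falls back from a private method of the current class, to the object's own method statement, to a builtin resolved through MethodCallPackage, the builtin is invoked through one of two paths. Calls with at most six arguments bind them to fixed slots and go through getMethFewArgs(); larger calls pack the arguments into an array (or ArgArray) for getMeth(). The sketch below illustrates only that argument-count dispatch; Value, FewArgsFn, and ArrayFn are hypothetical stand-ins, not the runtime's CVarRef and CallInfo types.

#include <cstdio>
#include <string>
#include <vector>

using Value = std::string;
using FewArgsFn = Value (*)(int count, const Value&, const Value&, const Value&,
                            const Value&, const Value&, const Value&);
using ArrayFn = Value (*)(const std::vector<Value>&);

static const Value kNull = "";   // stands in for the runtime's null value

Value dispatch(const std::vector<Value>& params, FewArgsFn fewArgs, ArrayFn packed) {
    const std::size_t count = params.size();
    if (count <= 6) {
        // Fast path: bind up to six arguments to fixed parameter slots,
        // padding the rest with null, so no argument container is built.
        const Value& a0 = count > 0 ? params[0] : kNull;
        const Value& a1 = count > 1 ? params[1] : kNull;
        const Value& a2 = count > 2 ? params[2] : kNull;
        const Value& a3 = count > 3 ? params[3] : kNull;
        const Value& a4 = count > 4 ? params[4] : kNull;
        const Value& a5 = count > 5 ? params[5] : kNull;
        return fewArgs(static_cast<int>(count), a0, a1, a2, a3, a4, a5);
    }
    // Slow path: more than six arguments are packed into one container.
    return packed(params);
}

// Toy callees that just report which path was taken.
static Value fewArgsImpl(int count, const Value& a0, const Value&, const Value&,
                         const Value&, const Value&, const Value&) {
    return "few-args path, count=" + std::to_string(count) + ", first=" + a0;
}
static Value packedImpl(const std::vector<Value>& args) {
    return "packed path, count=" + std::to_string(args.size());
}

int main() {
    std::puts(dispatch({"x", "y"}, fewArgsImpl, packedImpl).c_str());
    std::puts(dispatch({"1", "2", "3", "4", "5", "6", "7"}, fewArgsImpl, packedImpl).c_str());
    return 0;
}

Keeping a fixed-arity fast path avoids allocating an argument array for the overwhelmingly common case of short calls, which is exactly why the original code only falls back to ArgArray or ArrayInit when count exceeds six.
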