Example #1
bool KLUPD::HttpProtocol::HttpHeader::parseFirstLine(const std::string &httpCodeBuffer)
{
    // Sample status line: "HTTP/1.1 200 OK"
    if(httpCodeBuffer.size() < strlen("HTTP/1.1 200"))
        return false;

    // the status line must start with "HTTP/" (case-insensitive)
    if(_strnicmp(httpCodeBuffer.c_str(), "http/", strlen("http/")) != 0)
        return false;

    // locate the status code: skip the protocol version and any blanks after it
    size_t codeOffset = httpCodeBuffer.find_first_not_of(' ', strlen("HTTP/1.1 "));
    if(codeOffset == std::string::npos)
        return false;

    // a three-digit status code must follow
    if(httpCodeBuffer.size() < codeOffset + 3)
        return false;

    std::vector<char> codeBuffer(4, 0);
    strncpy(&codeBuffer[0], httpCodeBuffer.c_str() + codeOffset, 3);

    m_httpCode = atoi(&codeBuffer[0]);
    return true;
}
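
For illustration, here is a minimal standalone sketch of the same status-line parsing technique used in Example #1. The parseStatusCode helper and the main driver are hypothetical (not part of KLUPD), and a portable case-insensitive prefix check stands in for _strnicmp:

#include <cctype>
#include <cstdlib>
#include <cstring>
#include <iostream>
#include <string>

// Hypothetical standalone helper illustrating the same technique as
// HttpHeader::parseFirstLine: validate the "HTTP/" prefix, skip the
// version and the blanks after it, then read the three-digit status code.
static bool parseStatusCode(const std::string &statusLine, int &httpCode)
{
    if(statusLine.size() < std::strlen("HTTP/1.1 200"))
        return false;

    // case-insensitive check of the "HTTP/" prefix (portable stand-in for _strnicmp)
    for(size_t i = 0; i < std::strlen("HTTP/"); ++i)
        if(std::tolower(static_cast<unsigned char>(statusLine[i])) != "http/"[i])
            return false;

    // the first non-blank character after the protocol version starts the status code
    size_t codeOffset = statusLine.find_first_not_of(' ', std::strlen("HTTP/1.1 "));
    if(codeOffset == std::string::npos || statusLine.size() < codeOffset + 3)
        return false;

    httpCode = std::atoi(statusLine.substr(codeOffset, 3).c_str());
    return true;
}

int main()
{
    int code = 0;
    if(parseStatusCode("HTTP/1.1 200 OK", code))
        std::cout << "status code: " << code << std::endl;   // prints "status code: 200"
    return 0;
}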
Example #2
_List*	 _LikelihoodFunction::RecoverAncestralSequencesMarginal (long index, _Matrix & supportValues, _List& expandedSiteMap, bool doLeaves) 
// index:          which part (data partition) to process
// supportValues:  for each internal node and site, stores alphabetDimension values for the
//                 relative support of each residue at a given site;
//                 linearized 3D matrix:
//                 1st dimension - node index (same order as flatTree)
//                 2nd dimension - site index (only unique patterns are stored)
//                 3rd dimension - the character
// doLeaves:       compute support values for leaves instead of internal nodes

{	
	
	_DataSetFilter* dsf				= (_DataSetFilter*)dataSetFilterList (theDataFilters(index));
	_TheTree		*blockTree		= (_TheTree*)LocateVar(theTrees.lData[index]);
					 
	long			patternCount					= dsf->NumberDistinctSites	(),
					alphabetDimension				= dsf->GetDimension			(),
					unitLength						= dsf->GetUnitLength		(),
					iNodeCount						= blockTree->GetINodeCount	(),
					leafCount						= blockTree->GetLeafCount   (),
					matrixSize						= doLeaves?leafCount:iNodeCount,
					siteCount						= dsf->GetSiteCount			(),
					shiftForTheNode					= patternCount * alphabetDimension;
	
	_Parameter		*siteLikelihoods				= new _Parameter [2*patternCount],
					*siteLikelihoodsSpecState		= new _Parameter [2*patternCount];
	
	_SimpleList		scalersBaseline, 
					scalersSpecState,
					branchValues,
					postToIn;
	
	blockTree->MapPostOrderToInOderTraversal (postToIn, doLeaves == false);
	supportValues.Clear						 ();
	CreateMatrix					         (&supportValues,matrixSize,shiftForTheNode,false,true,false);
	
	ComputeSiteLikelihoodsForABlock			 (index, siteLikelihoods, scalersBaseline); 
													// establish a baseline likelihood for each site
		
	if (doLeaves)
	{
		for								(long currentChar = 0; currentChar < alphabetDimension; currentChar++)
		{
			branchValues.Populate			(patternCount,currentChar,0);
			for (long branchID = 0; branchID < leafCount; branchID ++)
			{
				blockTree->AddBranchToForcedRecomputeList (branchID);
				long mappedBranchID = postToIn.lData[branchID];
				ComputeSiteLikelihoodsForABlock (index, siteLikelihoodsSpecState, scalersSpecState, 
												 branchID+iNodeCount, &branchValues);
				for (long siteID = 0; siteID < patternCount; siteID++)
				{
					long scaleDiff = (scalersSpecState.lData[siteID]-scalersBaseline.lData[siteID]);
					_Parameter ratio = siteLikelihoodsSpecState[siteID]/siteLikelihoods[siteID];
					
					if (scaleDiff > 0)
						ratio *= acquireScalerMultiplier(scaleDiff);
					supportValues.theData[mappedBranchID*shiftForTheNode + siteID*alphabetDimension + currentChar] = ratio;
				}
				blockTree->AddBranchToForcedRecomputeList (branchID);
			}			
		}
	}
	
	else
		for (long currentChar = 0; currentChar < alphabetDimension-1; currentChar++)
			// the probability of the last character is 1 - sum of the probabilities of the other characters
		{
			branchValues.Populate			(patternCount,currentChar,0);
			for (long branchID = 0; branchID < iNodeCount; branchID ++)
			{
				long mappedBranchID = postToIn.lData[branchID];
				ComputeSiteLikelihoodsForABlock (index, siteLikelihoodsSpecState, scalersSpecState, branchID, &branchValues);
				for (long siteID = 0; siteID < patternCount; siteID++)
				{
					long scaleDiff = (scalersSpecState.lData[siteID]-scalersBaseline.lData[siteID]);
					_Parameter ratio = siteLikelihoodsSpecState[siteID]/siteLikelihoods[siteID];
					if (scaleDiff > 0)
						ratio *= acquireScalerMultiplier(scaleDiff);
					supportValues.theData[mappedBranchID*shiftForTheNode + siteID*alphabetDimension + currentChar] = ratio;
				}
				blockTree->AddBranchToForcedRecomputeList (branchID+leafCount);
			}			
		}
	
	_SimpleList  conversion;
	_AVLListXL	 conversionAVL (&conversion);
	_String		 codeBuffer    (unitLength, false);
	_List	     *result	   = new _List;
	
	for (long k = 0; k < matrixSize; k++)
		result->AppendNewInstance (new _String(siteCount*unitLength,false));
	
	for (long siteID = 0; siteID < patternCount; siteID++)
	{
		_SimpleList*	patternMap = (_SimpleList*) expandedSiteMap (siteID);
				
		for  (long nodeID = 0; nodeID < matrixSize ; nodeID++)
		{
			long			mappedNodeID = postToIn.lData[nodeID];
			_Parameter		max_lik     = 0.,	
							sum			= 0.,
							*scores		= supportValues.theData + shiftForTheNode*mappedNodeID +  siteID*alphabetDimension;
			long			max_idx     = 0;

			for (long charID = 0; charID < alphabetDimension-(!doLeaves); charID ++)
			{
				sum+=scores[charID];
				if (scores[charID] > max_lik)
				{
					max_idx = charID; max_lik = scores[charID];
					
				}
			}
				   
			//if (fabs(scores[alphabetDimension-1]+sum-1.) > 0.1)
			//	WarnError (_String("Bad monkey!") & scores[alphabetDimension-1] & ":" & (1.-sum) );
			
			if (doLeaves)
			{
				sum = 1./sum;
				for (long charID = 0; charID < alphabetDimension; charID ++)
				{
					scores [charID] *= sum;
					/*if (siteID == 16)
						printf ("Site %ld Leaf %ld (%ld) Char %ld = %g\n", siteID, nodeID, mappedNodeID, charID, 
								supportValues.theData[mappedNodeID*shiftForTheNode + siteID*alphabetDimension + charID]);
					 */
		
				}
			}
			else
			{
				scores[alphabetDimension-1] = 1. - sum;

				if (scores[alphabetDimension-1] > max_lik)
					max_idx = alphabetDimension-1; 
			}
						
			dsf->ConvertCodeToLettersBuffered (dsf->CorrectCode(max_idx), unitLength, codeBuffer.sData, &conversionAVL);
			_String  *sequence   = (_String*) (*result)(mappedNodeID);
			
			for (long site = 0; site < patternMap->lLength; site++)
			{
				//if (patternMap->lData[site] == 119)
				//	printf ("%ld\n", 
				//			siteID);
				char* storeHere = sequence->sData + patternMap->lData[site]*unitLength;
				for (long charS = 0; charS < unitLength; charS ++)
					storeHere[charS] = codeBuffer.sData[charS];
			}
			
		}
	}
	delete [] siteLikelihoods; 
	delete [] siteLikelihoodsSpecState;
	return result;
}
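
As a side note on the storage layout described in the comment header of Example #2: supportValues is a linearized node × site × character array, so each support value lives at a single flat offset. Below is a minimal sketch of that indexing scheme; the SupportMatrix type is hypothetical and independent of the HyPhy _Matrix class, and it assumes the three dimensions are laid out exactly as documented above:

#include <cstdio>
#include <vector>

// Hypothetical illustration of the linearized 3D layout used for supportValues:
// 1st dimension - node, 2nd dimension - site pattern, 3rd dimension - character.
struct SupportMatrix
{
    long patternCount;
    long alphabetDimension;
    std::vector<double> data;    // nodeCount * patternCount * alphabetDimension cells

    SupportMatrix(long nodes, long patterns, long alphabet)
        : patternCount(patterns),
          alphabetDimension(alphabet),
          data(nodes * patterns * alphabet, 0.0) {}

    // Same offset arithmetic as in the function above:
    //   mappedNodeID*shiftForTheNode + siteID*alphabetDimension + currentChar,
    // where shiftForTheNode == patternCount * alphabetDimension.
    double &at(long node, long site, long character)
    {
        return data[node * patternCount * alphabetDimension
                    + site * alphabetDimension
                    + character];
    }
};

int main()
{
    SupportMatrix support(3 /*nodes*/, 5 /*site patterns*/, 4 /*characters*/);
    support.at(1, 2, 3) = 0.75;                // write one support value
    std::printf("%g\n", support.at(1, 2, 3));  // prints 0.75
    return 0;
}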