Beispiel #1
0
_String*    StringFromConsole   (bool)
{
    // Read one line of user input from stdin and return it as a freshly
    // allocated _String (caller owns the result). Returns NULL (after
    // raising an error) in headless builds where no console exists.
    fflush(stdout);
    _String * returnme = new _String (32L, true);
#if not defined __HEADLESS__ && not defined _MINGW32_MEGA_
    int       readAChar;
    // Accumulate characters until the newline that terminates the line.
    while    ((readAChar = getc(stdin)) != '\n') {
        if (readAChar == EOF) {
            // Remember that the input stream ran dry so HBL code can
            // detect end-of-input via the hasEndBeenReached variable.
            CheckReceptacleAndStore (&hasEndBeenReached,empty,false,new _Constant (1.), false);
            break;
        }
        *returnme << readAChar;
    }
    returnme->Finalize ();
    return returnme;
#else
    // No interactive console in headless builds: release the buffer
    // allocated above (previously leaked on this path) and fail loudly.
    DeleteObject (returnme);
    WarnError ("Unhandled standard input interaction in StringFromConsole for headless HyPhy");
    return NULL;
#endif
}
Beispiel #2
0
void	_LikelihoodFunction::ReconstructAncestors (_DataSet &target,_SimpleList& doTheseOnes, _String& baseResultID,  bool sample, bool doMarginal, bool doLeaves)
/*
	Reconstruct ancestors for a likelihood function using 
 
-- target      :	the _DataSet object that will receive the results
-- doTheseOnes :	a _sorted_ array of partition indices to include in this operation; is assumed to contain valid indices (i.e. 0 -- number of partitions - 1)
-- baseResultID:	the HBL identifier of the dataset that will receive the result; used as a prefix for .marginal_support_matrix support matrix (when doMarginal = true)
-- sample	   :	if true, an ancestral sample (weighted by likelihood) is drawn, otherwise an ML (or maginal) reconstruction is carried out
-- doMarginal  :	if sample == false, doMarginal determines how the ancestors are reconstructed; if true, the reconstruction is marginal (maximizes
					the likelihood of each node while summing over the rest), otherwise it is joint.
-- doLeaves	   :	if sample == false and doMarginal == false (for now) and doLeaves == true, then the procedure will also
					reconstruct (joint ML) the best assignment of leaves
 
*/
{
	// Use the first requested partition to seed the output dataset's
	// translation table and to fix the reference tree topology that all
	// subsequent partitions must match.
	_DataSetFilter *dsf				= (_DataSetFilter*)dataSetFilterList (theDataFilters(doTheseOnes.lData[0]));	
	_TheTree    	*firstTree		= (_TheTree*)LocateVar(theTrees(doTheseOnes.lData[0]));
	
	target.SetTranslationTable		(dsf->GetData());	
	target.ConvertRepresentations(); 
	
	computationalResults.ZeroUsed();
	PrepareToCompute();
		
	// check if we need to deal with rate variation
	_Matrix			*rateAssignments = nil;
	if  (!doMarginal && indexCat.lLength>0)
		// joint/sampled reconstruction with rate categories: obtain a
		// per-site rate-class assignment matrix to condition on
		rateAssignments = (_Matrix*)checkPointer(ConstructCategoryMatrix(doTheseOnes,_hyphyLFConstructCategoryMatrixClasses,false));
	else
		Compute(); // need to do this to populate rate matrices
	
	// siteOffset tracks the running column position in 'target' across
	// partitions; sequenceCount is set from the first processed partition.
	long siteOffset			= 0,
		 patternOffset		= 0,
		 sequenceCount		;
	
	for (long i = 0; i<doTheseOnes.lLength; i++)
	{
		long	   partIndex    = doTheseOnes.lData[i];
		_TheTree   *tree		= (_TheTree*)LocateVar(theTrees(partIndex));
		dsf = (_DataSetFilter*)dataSetFilterList (theDataFilters(partIndex));
		
		long    catCounter = 0;
		
		if (rateAssignments)
		{
			// number of rate categories attached to this partition
			_SimpleList				pcats;
			PartitionCatVars		(pcats,partIndex);
			catCounter			  = pcats.lLength;
		}
		
		if (i==0)
		{
			tree->AddNodeNamesToDS (&target,sample == false && doLeaves,!(doLeaves && doMarginal) ,2*(doMarginal == false && sample == false && doLeaves));
				// store internal or leaf node names in the dataset
			sequenceCount = target.GetNames().lLength;
		}
		else
		{
			// subsequent partitions must be compatible with the first one:
			// same tree topology and a mergeable character alphabet
			if (!tree->Equal(firstTree)) // incompatible likelihood function
			{
				ReportWarning ((_String("Ancestor reconstruction had to ignore partition ")&_String(partIndex+1)&" of the likelihood function since it has a different tree topology than the first part."));
				continue;
			}
			_TranslationTable * mtt = target.GetTT()->MergeTables(dsf->GetData()->GetTT());
			if (mtt)
			{
				target.SetTranslationTable		(mtt);	
				DeleteObject					(mtt);
			}
			else
			{
				ReportWarning ((_String("Ancestor reconstruction had to ignore partition ")&_String(partIndex+1)&" of the likelihood function since it has a character alphabet incompatible with the first part."));
				continue;
			}
		}
		
		// expandedMap: pattern index -> list of site indices, used to
		// expand per-pattern results back to per-site columns
		_List		* expandedMap	= dsf->ComputePatternToSiteMap(),
					* thisSet;
		
		if (sample)
		{
			// draw an ancestral sample weighted by likelihood
			_AVLListX   * nodeMapper	= tree->ConstructNodeToIndexMap(true);
			thisSet						= new _List;
			_SimpleList* tcc			= (_SimpleList*)treeTraversalMasks(partIndex);
			if (tcc)
			{
				// traversal masks allow skipping of cached conditionals;
				// refill them so the sampler sees complete values
				// NOTE(review): 'cc <= catCounter' fills catCounter+1
				// blocks of size 'shifter' — confirm this off-by-one-looking
				// bound is intentional (class 0 plus catCounter categories?)
				long shifter = dsf->GetDimension()*dsf->NumberDistinctSites()*tree->GetINodeCount();
				for (long cc = 0; cc <= catCounter; cc++)
					tree->FillInConditionals(dsf, conditionalInternalNodeLikelihoodCaches[partIndex] + cc*shifter, tcc);
			}
			tree->SampleAncestorsBySequence (dsf, *(_SimpleList*)optimalOrders.lData[partIndex], 
												 &tree->GetRoot(), 
											     nodeMapper, 
											     conditionalInternalNodeLikelihoodCaches[partIndex],
												 *thisSet, 
											     nil,
											     *expandedMap,  
											     catCounter?rateAssignments->theData+siteOffset:nil, 
											     catCounter);
			
			// DeleteAll(false): drop map entries without deleting the
			// tree nodes they reference
			nodeMapper->DeleteAll(false);DeleteObject (nodeMapper);
			
		}
		else
		{
			if (doMarginal)
			{
				// marginal ML: also publish a per-node support matrix
				// under <baseResultID>.marginal_support_matrix
				_Matrix  *marginals = new _Matrix;
				_String  supportMxID = baseResultID & '.' & _hyMarginalSupportMatrix;
				thisSet = RecoverAncestralSequencesMarginal (partIndex, *marginals, *expandedMap, doLeaves);
				CheckReceptacleAndStore(&supportMxID, "ReconstructAncestors", true, marginals, false);
				
			}
			else
				// joint ML reconstruction (optionally including leaves)
				thisSet = tree->RecoverAncestralSequences (dsf, 
															*(_SimpleList*)optimalOrders.lData[partIndex],
															*expandedMap,
															conditionalInternalNodeLikelihoodCaches[partIndex],
															catCounter?rateAssignments->theData+siteOffset:nil, 
															catCounter,
															conditionalTerminalNodeStateFlag[partIndex],
															(_GrowingVector*)conditionalTerminalNodeLikelihoodCaches(partIndex),
															doLeaves
															);
																												
		}
		
		// copy the reconstructed strings into 'target': the first sequence
		// creates new sites, the remaining ones write into existing columns
		_String * sampledString = (_String*)(*thisSet)(0);
		
		for (long siteIdx = 0; siteIdx<sampledString->sLength; siteIdx++)
			target.AddSite (sampledString->sData[siteIdx]);
		
		for (long seqIdx = 1;seqIdx < sequenceCount; seqIdx++)
		{
			sampledString = (_String*)(*thisSet)(seqIdx);
			for (long siteIdx = 0;siteIdx<sampledString->sLength; siteIdx++)
				target.Write2Site (siteOffset + siteIdx, sampledString->sData[siteIdx]);
		}
		DeleteObject (thisSet);
		DeleteObject (expandedMap);
		siteOffset	  += dsf->GetSiteCount();
		// NOTE(review): patternOffset is advanced by GetSiteCount() (not a
		// distinct-pattern count) and is never read — confirm whether it is
		// vestigial or should use NumberDistinctSites()
		patternOffset += dsf->GetSiteCount();
	}
	
		
	target.Finalize();
	target.SetNoSpecies(target.GetNames().lLength);
	
	if (rateAssignments)
		DeleteObject (rateAssignments);
	
	DoneComputing ();
	
}
Beispiel #3
0
//__________________________________________________________________________________
// Convenience by-value overload: lets callers pass a temporary _String as the
// receptacle name. Simply forwards to the pointer-taking variant.
bool CheckReceptacleAndStore (_String name, _String fID, bool checkValid, _PMathObj v, bool dup)
{
    _String * receptacleName = &name;
    bool      stored         = CheckReceptacleAndStore (receptacleName, fID, checkValid, v, dup);
    return stored;
}