Example #1
void T92GCBranchTree::recursiveUpdate(const RevBayesCore::TopologyNode &from)    {

    size_t index = from.getIndex();

    double gc = 0.5;
    if (from.isRoot())    {
        gc = rootgc->getValue();
    }
    else    {        
        gc = gctree->getValue()[index];
    }

    RateMatrix_HKY& matrix = static_cast<RateMatrix_HKY&>( (*value)[index] );
    std::vector<double> v(4);
    v[0] = v[3] = 0.5 * (1 - gc);
    v[1] = v[2] = 0.5 * gc;
    matrix.setStationaryFrequenciesByCopy(v);
    matrix.setKappa(kappa->getValue());
    
    // recursively update the rate matrix for each child (if any)
    size_t numChildren = from.getNumberOfChildren();
    for (size_t i = 0; i < numChildren; ++i) {
        const TopologyNode& child = from.getChild(i);
        recursiveUpdate(child);
    }
}
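A minimal standalone sketch of the GC-content-to-frequency mapping used above, assuming the usual A, C, G, T state ordering (so v[0]..v[3] correspond to pi_A, pi_C, pi_G, pi_T): the AT fraction is split evenly between A and T, the GC fraction evenly between C and G.

#include <vector>

// not RevBayes code: a free-standing helper illustrating the frequency vector built in recursiveUpdate
std::vector<double> t92StationaryFrequencies(double gc)
{
    std::vector<double> v(4);
    v[0] = v[3] = 0.5 * (1.0 - gc);   // pi_A = pi_T = (1 - gc) / 2
    v[1] = v[2] = 0.5 * gc;           // pi_C = pi_G = gc / 2
    return v;
}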
void BranchLengthTree::reverseParentChild(RevBayesCore::TopologyNode &n)
{
    
    if ( !n.isRoot() )
    {
        TopologyNode &p = n.getParent();
        reverseParentChild( p );
        p.removeChild( &n );
        p.setParent( &n );
        n.addChild( &p );
		
        // swap branch lengths
        double parent_branch_length = branchLengths[p.getIndex()];
        double child_branch_length = branchLengths[n.getIndex()];
        branchLengths[p.getIndex()] = child_branch_length;
        branchLengths[n.getIndex()] = parent_branch_length;
    }

}
void BranchLengthTree::reroot(RevBayesCore::TopologyNode &n)
{
    // reset parent/child relationships
    reverseParentChild( n.getParent() );
    n.getParent().setParent( NULL );
    
    // get copies of the nodes and branchLengths
    std::vector<double> old_branchLengths = branchLengths;
    std::vector<TopologyNode*> nodes = getNodes();
    
    // set the new root
    topology->setRoot( &n.getParent() );
    
    // reset the branch lengths
    for (size_t i = 0; i < branchLengths.size(); ++i)
    {
        branchLengths[nodes[i]->getIndex()] = old_branchLengths[i];
    }
    
}
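The reroot/reverseParentChild pair above can be reduced to a small standalone sketch (hypothetical Node type, not the RevBayes classes): recurse rootwards first, then flip each parent/child link on the way back, so the chain of ancestors of the new root ends up hanging below it.

#include <algorithm>
#include <vector>

struct Node {
    Node* parent;
    std::vector<Node*> children;
    Node() : parent(0) {}
};

void flipParentChild(Node &n)
{
    if (n.parent != 0)
    {
        Node &p = *n.parent;
        flipParentChild(p);                   // reverse the links above this node first
        p.children.erase(std::remove(p.children.begin(), p.children.end(), &n), p.children.end());
        p.parent = &n;                        // the old parent becomes a child of n
        n.children.push_back(&p);
    }
}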
Example #4
void Func_simTree::simulateBalancedTree( size_t n, std::vector<RevBayesCore::TopologyNode*> &nodes )
{
    
    // check if the number of taxa is divisible by two
    size_t half = n / 2;
    if ( (half+half) != n )
    {
        throw RbException("Bad number of taxa.");
    }
    
    std::vector<RevBayesCore::TopologyNode*> children;
    for (size_t i = 0; i < nodes.size(); ++i)
    {
        RevBayesCore::TopologyNode *parent = nodes[i];
        
        // add a left child
        RevBayesCore::TopologyNode* leftChild = new RevBayesCore::TopologyNode(0);
        parent->addChild(leftChild);
        leftChild->setParent(parent);
        children.push_back(leftChild);
        
        // add a right child
        RevBayesCore::TopologyNode* rightChild = new RevBayesCore::TopologyNode(0);
        parent->addChild(rightChild);
        rightChild->setParent(parent);
        children.push_back(rightChild);
        
    }
    
    if ( half == 1 )
    {
        // we are done with the recursion
        for (size_t i = 0; i < children.size(); ++i)
        {
            RevBayesCore::TopologyNode *node = children[i];
            std::string name = "Taxon_" + StringUtilities::to_string(i+1);
            node->setName(name);
        }
        
    }
    else
    {
        simulateBalancedTree(half, children);
    }
    
}
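Note that n is halved at every level of the recursion, so (given an initial call with a single root node) only a power of two bottoms out cleanly at half == 1, with the leaves named Taxon_1 through Taxon_n; any other value eventually fails the (half+half) != n test and throws. A hypothetical pre-check, not part of the original function:

#include <cstddef>

// hypothetical guard, not in the RevBayes source
bool isPowerOfTwo(std::size_t n)
{
    return n > 0 && (n & (n - 1)) == 0;
}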
Example #5
void Func_simTree::simulateCaterpillarTree( size_t n, RevBayesCore::TopologyNode* node )
{
    
    
    // add a left child
    RevBayesCore::TopologyNode* leftChild = new RevBayesCore::TopologyNode(0);
    node->addChild(leftChild);
    leftChild->setParent(node);
    
    // add a right child
    RevBayesCore::TopologyNode* rightChild = new RevBayesCore::TopologyNode(0);
    node->addChild(rightChild);
    rightChild->setParent(node);
    
    std::string name = "Taxon_" + StringUtilities::to_string(n);
    rightChild->setName(name);
    
    if ( n > 2 )
    {
        simulateCaterpillarTree(n-1, leftChild);
    }
    else
    {
        std::string name = "Taxon_" + StringUtilities::to_string(n-1);
        leftChild->setName(name);
    }
    
}
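For reference, the shape produced above is the classic caterpillar (fully pectinate) topology: the deepest cherry is (Taxon_1,Taxon_2) and each remaining taxon attaches one level higher, with Taxon_n sitting next to the root. A small standalone sketch (not RevBayes code) that prints the corresponding Newick string:

#include <cstddef>
#include <iostream>
#include <string>

// assumes n >= 2
static std::string caterpillarNewick(std::size_t n)
{
    std::string s = "(Taxon_1,Taxon_2)";
    for (std::size_t i = 3; i <= n; ++i)
    {
        s = "(" + s + ",Taxon_" + std::to_string(i) + ")";
    }
    return s;
}

int main()
{
    std::cout << caterpillarNewick(4) << ";" << std::endl;   // prints (((Taxon_1,Taxon_2),Taxon_3),Taxon_4);
    return 0;
}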
Example #6
void Func_simTree::setAges(RevBayesCore::TimeTree *t, RevBayesCore::TopologyNode &n)
{
    
    if ( n.isTip() )
    {
        t->setAge(n.getIndex(), 0.0);
    }
    else
    {
        RevBayesCore::TopologyNode &left = n.getChild( 0 );
        setAges(t, left);
        
        RevBayesCore::TopologyNode &right = n.getChild( 1 );
        setAges(t, right);
        
        double a = t->getAge(left.getIndex());
        double b = t->getAge(right.getIndex());
        double max = (a > b ? a : b);
        
        t->setAge(n.getIndex(), max + 1.0);
    }
    
}
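Because every tip gets age 0 and every internal node gets the larger child age plus one, these ages are simply node heights counted in edges above the tips: a caterpillar tree with n tips ends up with root age n-1, and a balanced tree with 2^k tips with root age k.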
Example #7
void HeterogeneousRateBirthDeath::computeNodeProbability(const RevBayesCore::TopologyNode &node, size_t node_index)
{
    
    // check for recomputation
    if ( dirty_nodes[node_index] || true )
    {
        // mark as computed
        dirty_nodes[node_index] = false;
        
        
        const BranchHistory& bh = branch_histories[ node_index ];
        const std::multiset<CharacterEvent*,CharacterEventCompare>& hist = bh.getHistory();
        
//        const std::vector<CharacterEvent*> child_states = bh.getChildCharacters();
//        size_t start_index = child_states[0]->getState();
        size_t state_index_tipwards  = computeStartIndex( node_index );
        size_t state_index_rootwards = computeStartIndex( node.getParent().getIndex() );

        std::vector<double> initialState = std::vector<double>(1+num_rate_categories,0);
        if ( node.isTip() )
        {
            // this is a tip node
            
            double samplingProbability = rho->getValue();
            for (size_t i=0; i<num_rate_categories; ++i)
            {
                initialState[i] = 1.0 - samplingProbability;
            }
            initialState[num_rate_categories] = samplingProbability;
            
        }
        else
        {
            // this is an internal node
            const TopologyNode &left = node.getChild(0);
            size_t left_index = left.getIndex();
            computeNodeProbability( left, left_index );
            const TopologyNode &right = node.getChild(1);
            size_t right_index = right.getIndex();
            computeNodeProbability( right, right_index );
            
            // now compute the likelihoods of this internal node
            const std::vector<double> &leftStates = nodeStates[left_index][activeLikelihood[left_index]];
            const std::vector<double> &rightStates = nodeStates[right_index][activeLikelihood[right_index]];
            const RbVector<double> &birthRate = speciation->getValue();
            for (size_t i=0; i<num_rate_categories; ++i)
            {
                initialState[i] = leftStates[i];
            }
            
            initialState[num_rate_categories] = leftStates[num_rate_categories]*rightStates[num_rate_categories]*birthRate[ state_index_tipwards ];
            
        }
        
        const RbVector<double> &s = speciation->getValue();
        const RbVector<double> &e = extinction->getValue();
        double                  r = event_rate->getValue();
        double beginAge = node.getAge();

        // remember that we go back in time (rootwards)
        double begin_time = 0.0;
        double branch_length = node.getBranchLength();
        
        // set the previous state to an impossible state
        // we need this for checking if the states were different
        size_t previous_state = num_rate_categories;
        for (std::multiset<CharacterEvent*,CharacterEventCompare>::const_iterator it=hist.begin(); it!=hist.end(); ++it)
        {
            CharacterEvent* event = *it;
            double end_time = event->getTime();
            double time_interval = end_time - begin_time;
            
            // we need to set the current rate category
            size_t current_state = event->getState();
            
            // flag the case where the event did not actually change the rate category
            if ( previous_state == current_state )
            {
                shift_same_category = true;
            }
            
            updateBranchProbabilitiesNumerically(initialState, beginAge, beginAge+time_interval, s, e, r, current_state);
            
            initialState[num_rate_categories] = initialState[num_rate_categories]*event_rate->getValue()* (1.0/num_rate_categories);
            
            
            begin_time = end_time;
            beginAge += time_interval;
            previous_state = current_state;
        }
        
        // also flag the case where the last event did not change the category relative to the rootward state
        if ( previous_state == state_index_rootwards )
        {
            shift_same_category = true;
        }
        
        double time_interval = branch_length - begin_time;
        updateBranchProbabilitiesNumerically(initialState, beginAge, beginAge+time_interval, s, e, r, state_index_rootwards);
        
        
        // rescale the states
        double max = initialState[num_rate_categories];
        initialState[num_rate_categories] = 1.0;
        
//        totalScaling -= scalingFactors[node_index][activeLikelihood[node_index]];
//        scalingFactors[node_index][activeLikelihood[node_index]] = log(max);
//        totalScaling += scalingFactors[node_index][activeLikelihood[node_index]] - scalingFactors[node_index][activeLikelihood[node_index]^1];
//        totalScaling += scalingFactors[node_index][activeLikelihood[node_index]];
        
        totalScaling += log(max);
        
        // store the states
        nodeStates[node_index][activeLikelihood[node_index]] = initialState;
    }
    
}
Example #8
void CharacterDependentCladoBirthDeathProcess::computeNodeProbability(const RevBayesCore::TopologyNode &node, size_t node_index) const
{
    
    // check for recomputation
    if ( dirty_nodes[node_index] || true )
    {
        // mark as computed
        dirty_nodes[node_index] = false;
        
        // get cladogenesis event map (sparse speciation rate matrix)
        const DeterministicNode<MatrixReal>* cpn = static_cast<const DeterministicNode<MatrixReal>* >( cladogenesis_matrix );
        const TypedFunction<MatrixReal>& tf = cpn->getFunction();
        const AbstractCladogenicStateFunction* csf = dynamic_cast<const AbstractCladogenicStateFunction*>( &tf );
        std::map<std::vector<unsigned>, double> eventMap = csf->getEventMap();
        
        state_type node_likelihood = std::vector<double>(2*num_states,0);
        if ( node.isTip() )
        {
            
            // this is a tip node
            double samplingProbability = rho->getValue();
            const DiscreteCharacterState &state = static_cast<TreeDiscreteCharacterData*>( this->value )->getCharacterData().getTaxonData( node.getTaxon().getName() )[0];
            const RbBitSet &obs_state = state.getState();

            for (size_t j = 0; j < num_states; ++j)
            {
                
                node_likelihood[j] = 1.0 - samplingProbability;
                
                if ( obs_state.isSet( j ) == true || state.isMissingState() == true || state.isGapState() == true )
                {
                    node_likelihood[num_states+j] = samplingProbability;
                }
                else
                {
                    node_likelihood[num_states+j] = 0.0;
                }
            }
            
        }
        else
        {
            
            // this is an internal node
            const TopologyNode &left = node.getChild(0);
            size_t left_index = left.getIndex();
            computeNodeProbability( left, left_index );
            const TopologyNode &right = node.getChild(1);
            size_t right_index = right.getIndex();
            computeNodeProbability( right, right_index );
            
            // get the likelihoods of descendant nodes
            const state_type &left_likelihoods = partial_likelihoods[left_index];
            const state_type &right_likelihoods = partial_likelihoods[right_index];
            
            // merge descendant likelihoods
            for (size_t i=0; i<num_states; ++i)
            {
                node_likelihood[i] = left_likelihoods[i];
                
                double like_sum = 0.0;
                std::map<std::vector<unsigned>, double>::iterator it;
                for (it = eventMap.begin(); it != eventMap.end(); it++)
                {
                    const std::vector<unsigned>& states = it->first;
                    double speciation_rate = it->second;
                    if (i == states[0])
                    {
                        double likelihoods = left_likelihoods[num_states + states[1]] * right_likelihoods[num_states + states[2]];
                        like_sum += speciation_rate * likelihoods;
                    }
                }
                node_likelihood[num_states + i] = like_sum;
            }
        }
        
        // calculate likelihood for this branch
        CDCladoSE ode = CDCladoSE(extinction_rates, &Q->getValue(), eventMap, rate->getValue());
        double beginAge = node.getAge();
        double endAge = node.getParent().getAge();
        double dt = root_age->getValue() / NUM_TIME_SLICES;
//        boost::numeric::odeint::runge_kutta4< state_type > stepper;
//        boost::numeric::odeint::integrate_const( stepper, ode , node_likelihood , beginAge , endAge, dt );
        boost::numeric::odeint::bulirsch_stoer< state_type > stepper(1E-8, 0.0, 0.0, 0.0);
        boost::numeric::odeint::integrate_adaptive( stepper, ode , node_likelihood , beginAge , endAge, dt );
        
        
        // store the likelihoods
        partial_likelihoods[node_index] = node_likelihood;
    }
    
}
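The Bulirsch-Stoer call above follows the standard Boost.Odeint pattern: construct a controlled stepper, then hand it to integrate_adaptive together with the ODE functor, the state vector, the time interval, and an initial step size. A minimal self-contained sketch of that same pattern on a toy ODE, dx/dt = -k*x, using the same tolerance settings as above:

#include <vector>
#include <boost/numeric/odeint.hpp>

typedef std::vector<double> state_type;

// toy right-hand side: exponential decay
struct Decay
{
    double k;
    void operator()(const state_type &x, state_type &dxdt, const double /*t*/) const
    {
        dxdt[0] = -k * x[0];
    }
};

int main()
{
    state_type x(1, 1.0);
    Decay ode = { 0.5 };
    boost::numeric::odeint::bulirsch_stoer< state_type > stepper(1E-8, 0.0, 0.0, 0.0);
    boost::numeric::odeint::integrate_adaptive( stepper, ode, x, 0.0, 2.0, 0.01 );
    // x[0] now approximates exp(-0.5 * 2.0)
    return 0;
}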
Example #9
void StateDependentSpeciationExtinctionProcess::computeNodeProbability(const RevBayesCore::TopologyNode &node, size_t node_index) const
{
    
    // check for recomputation
//    if ( dirty_nodes[node_index] == true )
    if ( true )
    {
        // mark as computed
        dirty_nodes[node_index] = false;
        
        std::vector<double> node_likelihood = std::vector<double>(2 * num_states, 0);
        if ( node.isTip() == true )
        {
            
            // this is a tip node
            double samplingProbability = rho->getValue();
            const DiscreteCharacterState &state = static_cast<TreeDiscreteCharacterData*>( this->value )->getCharacterData().getTaxonData( node.getTaxon().getName() )[0];
            const RbBitSet &obs_state = state.getState();
            
            for (size_t j = 0; j < num_states; ++j)
            {
                
                node_likelihood[j] = 1.0 - samplingProbability;
                
                if ( obs_state.isSet( j ) == true || state.isMissingState() == true || state.isGapState() == true )
                {
                    node_likelihood[num_states+j] = samplingProbability;
                }
                else
                {
                    node_likelihood[num_states+j] = 0.0;
                }
            }
            
        }
        else
        {
            
            // this is an internal node
            const TopologyNode          &left           = node.getChild(0);
            size_t                      left_index      = left.getIndex();
            computeNodeProbability( left, left_index );
            const TopologyNode          &right          = node.getChild(1);
            size_t                      right_index     = right.getIndex();
            computeNodeProbability( right, right_index );
            
            // get the likelihoods of descendant nodes
            const std::vector<double> &left_likelihoods  = partial_likelihoods[left_index][active_likelihood[left_index]];
            const std::vector<double> &right_likelihoods = partial_likelihoods[right_index][active_likelihood[right_index]];

            std::map<std::vector<unsigned>, double> eventMap;
            std::vector<double> speciation_rates;
            if ( use_cladogenetic_events == true )
            {
                // get cladogenesis event map (sparse speciation rate matrix)
                const DeterministicNode<MatrixReal>* cpn = static_cast<const DeterministicNode<MatrixReal>* >( cladogenesis_matrix );
                const TypedFunction<MatrixReal>& tf = cpn->getFunction();
                const AbstractCladogenicStateFunction* csf = dynamic_cast<const AbstractCladogenicStateFunction*>( &tf );
                
                eventMap = csf->getEventMap();
            }
            else
            {
                speciation_rates = lambda->getValue();
            }
            
            // merge descendant likelihoods
            for (size_t i=0; i<num_states; ++i)
            {
                node_likelihood[i] = left_likelihoods[i];

                if ( use_cladogenetic_events == true )
                {
                    
                    double like_sum = 0.0;
                    std::map<std::vector<unsigned>, double>::iterator it;
                    for (it = eventMap.begin(); it != eventMap.end(); it++)
                    {
                        const std::vector<unsigned>& states = it->first;
                        double speciation_rate = it->second;
                        if (i == states[0])
                        {
                            double likelihoods = left_likelihoods[num_states + states[1]] * right_likelihoods[num_states + states[2]];
                            like_sum += speciation_rate * likelihoods;
                        }
                    }
                    node_likelihood[num_states + i] = like_sum;
                    
                }
                else
                {
                    node_likelihood[num_states + i] = left_likelihoods[num_states + i] * right_likelihoods[num_states + i] * speciation_rates[i];
                }
            }
        }
        
        // calculate likelihood for this branch
        double begin_age = node.getAge();
        double end_age = node.getParent().getAge();
        numericallyIntegrateProcess(node_likelihood, begin_age, end_age, true, false);
        
        // rescale the states
        double max = 0.0;
        for (size_t i=0; i<num_states; ++i)
        {
            if ( node_likelihood[num_states+i] > max )
            {
                max = node_likelihood[num_states+i];
            }
        }
        for (size_t i=0; i<num_states; ++i)
        {
            node_likelihood[num_states+i] /= max;
        }
        scaling_factors[node_index][active_likelihood[node_index]] = log(max);
        total_scaling += scaling_factors[node_index][active_likelihood[node_index]] - scaling_factors[node_index][active_likelihood[node_index]^1];
        
        // store the likelihoods
        partial_likelihoods[node_index][active_likelihood[node_index]] = node_likelihood;

    }
    
}
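The rescaling block above is the usual underflow guard for pruning-style likelihood calculations: the observation half of the partial-likelihood vector is divided by its maximum and log(max) is pushed into the running scaling total (the active/^1 indexing subtracts the node's previous scaling factor so the total is updated incrementally). A minimal standalone sketch of just the per-node step, with hypothetical names:

#include <cmath>
#include <cstddef>
#include <vector>

// divide the observation part (indices num_states .. 2*num_states-1) by its maximum
// and return the log scaling factor to be added to the running total
double rescalePartials(std::vector<double> &node_likelihood, std::size_t num_states)
{
    double max = 0.0;
    for (std::size_t i = 0; i < num_states; ++i)
    {
        if (node_likelihood[num_states + i] > max)
        {
            max = node_likelihood[num_states + i];
        }
    }
    for (std::size_t i = 0; i < num_states; ++i)
    {
        node_likelihood[num_states + i] /= max;
    }
    return std::log(max);
}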
Example #10
void MultiRateBirthDeathProcess::computeNodeProbability(const RevBayesCore::TopologyNode &node, size_t node_index) const
{
    
    // check for recomputation
    if ( dirty_nodes[node_index] || true )
    {
        // mark as computed
        dirty_nodes[node_index] = false;
        
        state_type initialState = std::vector<double>(2*numRateCategories,0);
        if ( node.isTip() )
        {
            // this is a tip node
            
            double samplingProbability = rho->getValue();
            for (size_t i=0; i<numRateCategories; ++i)
            {
                initialState[i] = 1.0 - samplingProbability;
                initialState[numRateCategories+i] = samplingProbability;
            }

        }
        else
        {
            // this is an internal node
            const TopologyNode &left = node.getChild(0);
            size_t left_index = left.getIndex();
            computeNodeProbability( left, left_index );
            const TopologyNode &right = node.getChild(1);
            size_t right_index = right.getIndex();
            computeNodeProbability( right, right_index );
            
            // now compute the likelihoods of this internal node
            const state_type &leftStates = nodeStates[left_index][activeLikelihood[left_index]];
            const state_type &rightStates = nodeStates[right_index][activeLikelihood[right_index]];
            const RbVector<double> &birthRate = lambda->getValue();
            for (size_t i=0; i<numRateCategories; ++i)
            {
                initialState[i] = leftStates[i];
                initialState[numRateCategories+i] = leftStates[numRateCategories+i]*rightStates[numRateCategories+i]*birthRate[i];
            }

        }
        
        CDSE ode = CDSE(lambda->getValue(), mu->getValue(), &Q->getValue(), rate->getValue());
        double beginAge = node.getAge();
        double endAge = node.getParent().getAge();
        double dt = root_age->getValue() / NUM_TIME_SLICES;
        boost::numeric::odeint::runge_kutta4< state_type > stepper;
        boost::numeric::odeint::integrate_const( stepper, ode , initialState , beginAge , endAge, dt );
        
        // rescale the states
        double max = 0.0;
        for (size_t i=0; i<numRateCategories; ++i)
        {
            if ( initialState[numRateCategories+i] > max )
            {
                max = initialState[numRateCategories+i];
            }
        }
//        max = 1.0;
        for (size_t i=0; i<numRateCategories; ++i)
        {
            initialState[numRateCategories+i] /= max;
        }
        scalingFactors[node_index][activeLikelihood[node_index]] = log(max);
        totalScaling += scalingFactors[node_index][activeLikelihood[node_index]] - scalingFactors[node_index][activeLikelihood[node_index]^1];
        
        // store the states
        nodeStates[node_index][activeLikelihood[node_index]] = initialState;
    }
    
}