/**
 * Determine the rate-category index at the start of the branch above node i: walk rootwards
 * until a branch carrying at least one event is found and return the state of the first event
 * in that branch's history; if the root is reached without finding any event, fall back to
 * the root state.
 */
size_t HeterogeneousRateBirthDeath::computeStartIndex(size_t i) const
{
    
    size_t node_index = i;
    while ( value->getNode(node_index).isRoot() == false && branch_histories[node_index].getNumberEvents() == 0 )
    {
        node_index = value->getNode(node_index).getParent().getIndex();
    }
    
    if ( value->getNode(node_index).isRoot() == false )
    {
        const BranchHistory &bh = branch_histories[ node_index ];
        const std::multiset<CharacterEvent*, CharacterEventCompare> &h = bh.getHistory();
        CharacterEvent *event = *(h.begin());
        
        return event->getState();
    }
    else
    {
        return root_state->getValue() - 1;
    }
    
}
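/**
 * Execute a member method of this distribution. Supported are "averageSpeciationRate" and
 * "averageExtinctionRate": for every branch, rv receives the rate averaged over the rate
 * categories visited along that branch, each weighted by the fraction of the branch length
 * spent in that category.
 */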
void HeterogeneousRateBirthDeath::executeMethod(const std::string &n, const std::vector<const DagNode *> &args, RbVector<double> &rv) const
{
    
    if ( n == "averageSpeciationRate" )
    {
        size_t num_branches = branch_histories.getNumberBranches();
        const RbVector<double> &lambda = speciation->getValue();
        rv.clear();
        rv.resize( num_branches );
        for (size_t i = 0; i < num_branches; ++i)
        {
            const TopologyNode &node = this->value->getNode( i );
            const BranchHistory& bh = branch_histories[ i ];
            const std::multiset<CharacterEvent*,CharacterEventCompare>& hist = bh.getHistory();
            
            size_t state_index_rootwards = computeStartIndex( node.getParent().getIndex() );
            
            double rate = 0;
            double begin_time = 0.0;
            double branch_length = node.getBranchLength();
            for (std::multiset<CharacterEvent*,CharacterEventCompare>::const_iterator it=hist.begin(); it!=hist.end(); ++it)
            {
                CharacterEvent* event = *it;
                double end_time = event->getTime();
                double time_interval = (end_time - begin_time) / branch_length;
                
                // we need to set the current rate category
                size_t current_state = event->getState();
                rate += time_interval * lambda[current_state];
                
                begin_time = end_time;
            }
            rate += (branch_length - begin_time) / branch_length * lambda[state_index_rootwards];
            
            rv[i] = rate;
        }
        
    }
    else if ( n == "averageExtinctionRate" )
    {
        size_t num_branches = branch_histories.getNumberBranches();
        const RbVector<double> &mu = extinction->getValue();
        rv.clear();
        rv.resize( num_branches );
        for (size_t i = 0; i < num_branches; ++i)
        {
            const TopologyNode &node = this->value->getNode( i );
            const BranchHistory& bh = branch_histories[ i ];
            const std::multiset<CharacterEvent*,CharacterEventCompare>& hist = bh.getHistory();
            
            size_t state_index_rootwards = computeStartIndex( node.getParent().getIndex() );
            
            double rate = 0;
            double begin_time = 0.0;
            double branch_length = node.getBranchLength();
            for (std::multiset<CharacterEvent*,CharacterEventCompare>::const_iterator it=hist.begin(); it!=hist.end(); ++it)
            {
                CharacterEvent* event = *it;
                double end_time = event->getTime();
                double time_interval = (end_time - begin_time) / branch_length;
                
                // we need to set the current rate category
                size_t current_state = event->getState();
                rate += time_interval * mu[current_state];
                
                begin_time = end_time;
            }
            rate += (branch_length - begin_time) / branch_length * mu[state_index_rootwards];
            
            rv[i] = rate;
        }
        
    }
    else
    {
        throw RbException("The heterogeneous rate birth-death process does not have a member method called '" + n + "'.");
    }
    
}
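/**
 * Compute the per-category probabilities along the branch above the given node. Tips are
 * initialized from the sampling probability rho; internal nodes first recurse into both
 * children and combine their results with the speciation rate of the category at the node.
 * The probabilities are then integrated numerically along the branch, piecewise between the
 * rate-shift events recorded in the branch history, rescaled, and stored in nodeStates.
 */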
void HeterogeneousRateBirthDeath::computeNodeProbability(const RevBayesCore::TopologyNode &node, size_t node_index)
{
    
    // check for recomputation
    // (the "|| true" disables the dirty-flag shortcut, so every node is currently always recomputed)
    if ( dirty_nodes[node_index] || true )
    {
        // mark as computed
        dirty_nodes[node_index] = false;
        
        const BranchHistory& bh = branch_histories[ node_index ];
        const std::multiset<CharacterEvent*,CharacterEventCompare>& hist = bh.getHistory();
        
        // const std::vector<CharacterEvent*> child_states = bh.getChildCharacters();
        // size_t start_index = child_states[0]->getState();
        size_t state_index_tipwards  = computeStartIndex( node_index );
        size_t state_index_rootwards = computeStartIndex( node.getParent().getIndex() );
        
        std::vector<double> initialState = std::vector<double>(1+num_rate_categories, 0);
        if ( node.isTip() )
        {
            // this is a tip node
            double samplingProbability = rho->getValue();
            for (size_t i=0; i<num_rate_categories; ++i)
            {
                initialState[i] = 1.0 - samplingProbability;
            }
            initialState[num_rate_categories] = samplingProbability;
        }
        else
        {
            // this is an internal node
            const TopologyNode &left = node.getChild(0);
            size_t left_index = left.getIndex();
            computeNodeProbability( left, left_index );
            const TopologyNode &right = node.getChild(1);
            size_t right_index = right.getIndex();
            computeNodeProbability( right, right_index );
            
            // now compute the likelihoods of this internal node
            const std::vector<double> &leftStates  = nodeStates[left_index][activeLikelihood[left_index]];
            const std::vector<double> &rightStates = nodeStates[right_index][activeLikelihood[right_index]];
            const RbVector<double> &birthRate = speciation->getValue();
            for (size_t i=0; i<num_rate_categories; ++i)
            {
                initialState[i] = leftStates[i];
            }
            initialState[num_rate_categories] = leftStates[num_rate_categories] * rightStates[num_rate_categories] * birthRate[ state_index_tipwards ];
        }
        
        const RbVector<double> &s = speciation->getValue();
        const RbVector<double> &e = extinction->getValue();
        double r = event_rate->getValue();
        
        double beginAge = node.getAge();
        
        // remember that we go back in time (rootwards)
        double begin_time = 0.0;
        double branch_length = node.getBranchLength();
        
        // set the previous state to an impossible state
        // we need this for checking if the states were different
        size_t previous_state = num_rate_categories;
        for (std::multiset<CharacterEvent*,CharacterEventCompare>::const_iterator it=hist.begin(); it!=hist.end(); ++it)
        {
            CharacterEvent* event = *it;
            double end_time = event->getTime();
            double time_interval = end_time - begin_time;
            
            // we need to set the current rate category
            size_t current_state = event->getState();
            
            // flag the shift if it did not change the rate category
            if ( previous_state == current_state )
            {
                shift_same_category = true;
            }
            
            updateBranchProbabilitiesNumerically(initialState, beginAge, beginAge+time_interval, s, e, r, current_state);
            
            initialState[num_rate_categories] = initialState[num_rate_categories] * event_rate->getValue() * (1.0/num_rate_categories);
            
            begin_time      = end_time;
            beginAge       += time_interval;
            previous_state  = current_state;
        }
        
        // flag the shift if it did not change the rate category
        if ( previous_state == state_index_rootwards )
        {
            shift_same_category = true;
        }
        
        double time_interval = branch_length - begin_time;
        updateBranchProbabilitiesNumerically(initialState, beginAge, beginAge+time_interval, s, e, r, state_index_rootwards);
        
        // rescale the states
        double max = initialState[num_rate_categories];
        initialState[num_rate_categories] = 1.0;
        // totalScaling -= scalingFactors[node_index][activeLikelihood[node_index]];
        // scalingFactors[node_index][activeLikelihood[node_index]] = log(max);
        // totalScaling += scalingFactors[node_index][activeLikelihood[node_index]] - scalingFactors[node_index][activeLikelihood[node_index]^1];
        // totalScaling += scalingFactors[node_index][activeLikelihood[node_index]];
        totalScaling += log(max);
        
        // store the states
        nodeStates[node_index][activeLikelihood[node_index]] = initialState;
    }
    
}