/** * Perform the proposal. * * \return The hastings ratio. */ double EventBranchTimeBetaProposal::doProposal( void ) { CharacterHistory &history = distribution->getCharacterHistory(); RandomNumberGenerator *rng = GLOBAL_RNG; size_t num_events = history.getNumberEvents(); // we let the proposal fail if there is actually no event to slide failed = (num_events == 0); if ( failed == false ) { // pick a random event size_t branch_index = 0; CharacterEvent *event = history.pickRandomEvent( branch_index ); // we need to remove and add the event so that the events are back in time order history.removeEvent(event, branch_index); double branch_length = distribution->getValue().getNode(branch_index).getBranchLength(); // store the event stored_value = event; // get the current index stored_time = event->getTime(); // store the current branch stored_branch_index = branch_index; // draw new ages and compute the hastings ratio at the same time double m = stored_time / branch_length; double a = delta * m + offset; double b = delta * (1.0-m) + offset; double new_time = RbStatistics::Beta::rv(a, b, *rng); // compute the Hastings ratio double forward = RbStatistics::Beta::lnPdf(a, b, new_time); double new_a = delta * new_time + offset; double new_b = delta * (1.0-new_time) + offset; double backward = RbStatistics::Beta::lnPdf(new_a, new_b, stored_time / branch_length); // set the time event->setTime( new_time * branch_length ); // we need to remove and add the event so that the events are back in time order history.addEvent(event, branch_index); return backward - forward; } else { // we need to decrement the failed counter because we did not actually reject the new proposal move->decrementTriedCounter(); return RbConstants::Double::neginf; } return 0.0; }
void HeterogeneousRateBirthDeath::executeMethod(const std::string &n, const std::vector<const DagNode *> &args, RbVector<double> &rv) const { if ( n == "averageSpeciationRate" ) { size_t num_branches = branch_histories.getNumberBranches(); const RbVector<double> &lambda = speciation->getValue(); rv.clear(); rv.resize( num_branches ); for (size_t i = 0; i < num_branches; ++i) { const TopologyNode &node = this->value->getNode( i ); const BranchHistory& bh = branch_histories[ i ]; const std::multiset<CharacterEvent*,CharacterEventCompare>& hist = bh.getHistory(); size_t state_index_rootwards = computeStartIndex( node.getParent().getIndex() ); double rate = 0; double begin_time = 0.0; double branch_length = node.getBranchLength(); for (std::multiset<CharacterEvent*,CharacterEventCompare>::const_iterator it=hist.begin(); it!=hist.end(); ++it) { CharacterEvent* event = *it; double end_time = event->getTime(); double time_interval = (end_time - begin_time) / branch_length; // we need to set the current rate caterogy size_t current_state = event->getState(); rate += time_interval * lambda[current_state]; begin_time = end_time; } rate += (branch_length-begin_time)/branch_length * lambda[state_index_rootwards]; rv[i] = rate; } } else if ( n == "averageExtinctionRate" ) { size_t num_branches = branch_histories.getNumberBranches(); const RbVector<double> &mu = extinction->getValue(); rv.clear(); rv.resize( num_branches ); for (size_t i = 0; i < num_branches; ++i) { const TopologyNode &node = this->value->getNode( i ); const BranchHistory& bh = branch_histories[ i ]; const std::multiset<CharacterEvent*,CharacterEventCompare>& hist = bh.getHistory(); size_t state_index_rootwards = computeStartIndex( node.getParent().getIndex() ); double rate = 0; double begin_time = 0.0; double branch_length = node.getBranchLength(); for (std::multiset<CharacterEvent*,CharacterEventCompare>::const_iterator it=hist.begin(); it!=hist.end(); ++it) { CharacterEvent* event = *it; double end_time = 
event->getTime(); double time_interval = (end_time - begin_time) / branch_length; // we need to set the current rate caterogy size_t current_state = event->getState(); rate += time_interval * mu[current_state]; begin_time = end_time; } rate += (branch_length-begin_time)/branch_length * mu[state_index_rootwards]; rv[i] = rate; } } else { throw RbException("The heterogeneous rate birth-death process does not have a member method called '" + n + "'."); } }
/**
 * Compute the partial likelihood at this node and integrate the state
 * probabilities along its subtending branch (from the node rootwards).
 *
 * For internal nodes the children are computed recursively first. The working
 * vector 'initialState' has num_rate_categories+1 entries: slots
 * [0..num_rate_categories-1] hold per-category probabilities and slot
 * [num_rate_categories] holds the probability of the observed data below this
 * node. The result is written into nodeStates[node_index][activeLikelihood[...]],
 * and the numerical rescaling factor is accumulated (in log space) into
 * totalScaling.
 *
 * \param[in] node        The node whose subtree likelihood is (re)computed.
 * \param[in] node_index  The index of that node.
 */
void HeterogeneousRateBirthDeath::computeNodeProbability(const RevBayesCore::TopologyNode &node, size_t node_index)
{
    // check for recomputation
    // NOTE(review): the '|| true' disables the dirty-flag short-circuit, so this
    // node is ALWAYS recomputed regardless of dirty_nodes — presumably a deliberate
    // safety measure or debugging leftover; confirm before removing it.
    if ( dirty_nodes[node_index] || true )
    {
        // mark as computed
        dirty_nodes[node_index] = false;

        const BranchHistory& bh = branch_histories[ node_index ];
        const std::multiset<CharacterEvent*,CharacterEventCompare>& hist = bh.getHistory();

        // const std::vector<CharacterEvent*> child_states = bh.getChildCharacters();
        // size_t start_index = child_states[0]->getState();

        // rate category at the tipwards end of this branch, and at the rootwards end
        // (determined from the parent node's branch history)
        size_t state_index_tipwards = computeStartIndex( node_index );
        size_t state_index_rootwards = computeStartIndex( node.getParent().getIndex() );

        std::vector<double> initialState = std::vector<double>(1+num_rate_categories,0);
        if ( node.isTip() )
        {
            // this is a tip node: initialize from the sampling probability rho
            double samplingProbability = rho->getValue();
            for (size_t i=0; i<num_rate_categories; ++i)
            {
                initialState[i] = 1.0 - samplingProbability;
            }
            initialState[num_rate_categories] = samplingProbability;
        }
        else
        {
            // this is an internal node: recursively compute both children first
            const TopologyNode &left = node.getChild(0);
            size_t left_index = left.getIndex();
            computeNodeProbability( left, left_index );
            const TopologyNode &right = node.getChild(1);
            size_t right_index = right.getIndex();
            computeNodeProbability( right, right_index );

            // now compute the likelihoods of this internal node
            const std::vector<double> &leftStates = nodeStates[left_index][activeLikelihood[left_index]];
            const std::vector<double> &rightStates = nodeStates[right_index][activeLikelihood[right_index]];
            const RbVector<double> &birthRate = speciation->getValue();
            // NOTE(review): only the left child's per-category probabilities are
            // copied; presumably left and right are assumed identical at the node —
            // verify against the model's derivation.
            for (size_t i=0; i<num_rate_categories; ++i)
            {
                initialState[i] = leftStates[i];
            }
            // observed-data slot: join the two subtrees via a speciation event
            // in the tipwards rate category
            initialState[num_rate_categories] = leftStates[num_rate_categories]*rightStates[num_rate_categories]*birthRate[ state_index_tipwards ];
        }

        const RbVector<double> &s = speciation->getValue();
        const RbVector<double> &e = extinction->getValue();
        double r = event_rate->getValue();

        double beginAge = node.getAge();

        // remember that we go back in time (rootwards)
        double begin_time = 0.0;
        double branch_length = node.getBranchLength();

        // set the previous state to an impossible state
        // we need this for checking if the states were different
        size_t previous_state = num_rate_categories;

        // integrate piecewise along the branch: one numerical segment per
        // inter-event interval, in time order (events sorted by the multiset)
        for (std::multiset<CharacterEvent*,CharacterEventCompare>::const_iterator it=hist.begin(); it!=hist.end(); ++it)
        {
            CharacterEvent* event = *it;
            double end_time = event->getTime();
            double time_interval = end_time - begin_time;

            // we need to set the current rate category
            size_t current_state = event->getState();

            // check that we got a distinct new state; flag if a "shift" kept the same category
            if ( previous_state == current_state )
            {
                shift_same_category = true;
            }

            updateBranchProbabilitiesNumerically(initialState, beginAge, beginAge+time_interval, s, e, r, current_state);

            // multiply in the event density: shift rate times the (uniform)
            // probability of picking this category
            initialState[num_rate_categories] = initialState[num_rate_categories]*event_rate->getValue()* (1.0/num_rate_categories);

            begin_time = end_time;
            beginAge += time_interval;
            previous_state = current_state;
        }

        // check that we got a distinct new state at the rootwards end as well
        if ( previous_state == state_index_rootwards )
        {
            shift_same_category = true;
        }

        // integrate the final segment up to the rootwards end of the branch
        double time_interval = branch_length - begin_time;
        updateBranchProbabilitiesNumerically(initialState, beginAge, beginAge+time_interval, s, e, r, state_index_rootwards);

        // rescale the states to avoid numerical underflow: normalize the
        // observed-data slot to 1.0 and accumulate the factor in log space
        double max = initialState[num_rate_categories];
        initialState[num_rate_categories] = 1.0;
        // totalScaling -= scalingFactors[node_index][activeLikelihood[node_index]];
        // scalingFactors[node_index][activeLikelihood[node_index]] = log(max);
        // totalScaling += scalingFactors[node_index][activeLikelihood[node_index]] - scalingFactors[node_index][activeLikelihood[node_index]^1];
        // totalScaling += scalingFactors[node_index][activeLikelihood[node_index]];
        totalScaling += log(max);

        // store the states in the active likelihood buffer for this node
        nodeStates[node_index][activeLikelihood[node_index]] = initialState;
    }
}