void CharacterDependentCladoBirthDeathProcess::computeNodeProbability(const RevBayesCore::TopologyNode &node, size_t node_index) const { // check for recomputation if ( dirty_nodes[node_index] || true ) { // mark as computed dirty_nodes[node_index] = false; // get cladogenesis event map (sparse speciation rate matrix) const DeterministicNode<MatrixReal>* cpn = static_cast<const DeterministicNode<MatrixReal>* >( cladogenesis_matrix ); const TypedFunction<MatrixReal>& tf = cpn->getFunction(); const AbstractCladogenicStateFunction* csf = dynamic_cast<const AbstractCladogenicStateFunction*>( &tf ); std::map<std::vector<unsigned>, double> eventMap = csf->getEventMap(); state_type node_likelihood = std::vector<double>(2*num_states,0); if ( node.isTip() ) { // this is a tip node double samplingProbability = rho->getValue(); const DiscreteCharacterState &state = static_cast<TreeDiscreteCharacterData*>( this->value )->getCharacterData().getTaxonData( node.getTaxon().getName() )[0]; const RbBitSet &obs_state = state.getState(); for (size_t j = 0; j < num_states; ++j) { node_likelihood[j] = 1.0 - samplingProbability; if ( obs_state.isSet( j ) == true || state.isMissingState() == true || state.isGapState() == true ) { node_likelihood[num_states+j] = samplingProbability; } else { node_likelihood[num_states+j] = 0.0; } } } else { // this is an internal node const TopologyNode &left = node.getChild(0); size_t left_index = left.getIndex(); computeNodeProbability( left, left_index ); const TopologyNode &right = node.getChild(1); size_t right_index = right.getIndex(); computeNodeProbability( right, right_index ); // get the likelihoods of descendant nodes const state_type &left_likelihoods = partial_likelihoods[left_index]; const state_type &right_likelihoods = partial_likelihoods[right_index]; // merge descendant likelihoods for (size_t i=1; i<num_states; ++i) { node_likelihood[i] = left_likelihoods[i]; double like_sum = 0.0; std::map<std::vector<unsigned>, double>::iterator it; 
for (it = eventMap.begin(); it != eventMap.end(); it++) { const std::vector<unsigned>& states = it->first; double speciation_rate = it->second; if (i == states[0]) { double likelihoods = left_likelihoods[num_states + states[1]] * right_likelihoods[num_states + states[2]]; like_sum += speciation_rate * likelihoods; } } node_likelihood[num_states + i] = like_sum; } } // calculate likelihood for this branch CDCladoSE ode = CDCladoSE(extinction_rates, &Q->getValue(), eventMap, rate->getValue()); double beginAge = node.getAge(); double endAge = node.getParent().getAge(); double dt = root_age->getValue() / NUM_TIME_SLICES; // boost::numeric::odeint::runge_kutta4< state_type > stepper; // boost::numeric::odeint::integrate_const( stepper, ode , node_likelihood , beginAge , endAge, dt ); boost::numeric::odeint::bulirsch_stoer< state_type > stepper(1E-8, 0.0, 0.0, 0.0); boost::numeric::odeint::integrate_adaptive( stepper, ode , node_likelihood , beginAge , endAge, dt ); // store the likelihoods partial_likelihoods[node_index] = node_likelihood; } }
/**
 * Compute the partial likelihoods at a node by post-order recursion and
 * numerical integration along the subtending branch, with per-node rescaling.
 *
 * Likelihood vector layout (size 2*num_states):
 *   [0, num_states)             — extinction probabilities
 *   [num_states, 2*num_states)  — state-dependent clade likelihoods
 *
 * Results are written into the active buffer of partial_likelihoods[node_index];
 * the log of the rescaling constant is accumulated into total_scaling.
 *
 * \param node        The node whose partial likelihoods are computed.
 * \param node_index  Index of the node in the partial-likelihood arrays.
 */
void StateDependentSpeciationExtinctionProcess::computeNodeProbability(const RevBayesCore::TopologyNode &node, size_t node_index) const
{
    // check for recomputation
    // NOTE(review): the dirty-node check is commented out, so this always
    // recomputes — presumably deliberate; confirm before re-enabling.
//    if ( dirty_nodes[node_index] == true )
    if ( true )
    {
        // mark as computed
        dirty_nodes[node_index] = false;

        std::vector<double> node_likelihood = std::vector<double>(2 * num_states, 0);
        if ( node.isTip() == true )
        {
            // this is a tip node: initialize from the observed character state
            // and the taxon sampling probability rho
            double samplingProbability = rho->getValue();
            const DiscreteCharacterState &state = static_cast<TreeDiscreteCharacterData*>( this->value )->getCharacterData().getTaxonData( node.getTaxon().getName() )[0];
            const RbBitSet &obs_state = state.getState();
            for (size_t j = 0; j < num_states; ++j)
            {
                // extinction probability of an unsampled lineage
                node_likelihood[j] = 1.0 - samplingProbability;
                // missing/gap observations are compatible with every state
                if ( obs_state.isSet( j ) == true || state.isMissingState() == true || state.isGapState() == true )
                {
                    node_likelihood[num_states+j] = samplingProbability;
                }
                else
                {
                    node_likelihood[num_states+j] = 0.0;
                }
            }
        }
        else
        {
            // this is an internal node: recurse into both children first
            const TopologyNode &left = node.getChild(0);
            size_t left_index = left.getIndex();
            computeNodeProbability( left, left_index );
            const TopologyNode &right = node.getChild(1);
            size_t right_index = right.getIndex();
            computeNodeProbability( right, right_index );

            // get the likelihoods of descendant nodes (from each child's
            // currently active double-buffer slot)
            const std::vector<double> &left_likelihoods = partial_likelihoods[left_index][active_likelihood[left_index]];
            const std::vector<double> &right_likelihoods = partial_likelihoods[right_index][active_likelihood[right_index]];

            std::map<std::vector<unsigned>, double> eventMap;
            std::vector<double> speciation_rates;
            if ( use_cladogenetic_events == true )
            {
                // get cladogenesis event map (sparse speciation rate matrix);
                // keys are {ancestor_state, left_state, right_state}
                const DeterministicNode<MatrixReal>* cpn = static_cast<const DeterministicNode<MatrixReal>* >( cladogenesis_matrix );
                const TypedFunction<MatrixReal>& tf = cpn->getFunction();
                const AbstractCladogenicStateFunction* csf = dynamic_cast<const AbstractCladogenicStateFunction*>( &tf );
                eventMap = csf->getEventMap();
            }
            else
            {
                // anagenetic-only model: one speciation rate per state
                speciation_rates = lambda->getValue();
            }

            // merge descendant likelihoods
            for (size_t i=0; i<num_states; ++i)
            {
                node_likelihood[i] = left_likelihoods[i];
                if ( use_cladogenetic_events == true )
                {
                    // sum over all cladogenetic events whose ancestral state is i
                    double like_sum = 0.0;
                    std::map<std::vector<unsigned>, double>::iterator it;
                    for (it = eventMap.begin(); it != eventMap.end(); it++)
                    {
                        const std::vector<unsigned>& states = it->first;
                        double speciation_rate = it->second;
                        if (i == states[0])
                        {
                            double likelihoods = left_likelihoods[num_states + states[1]] * right_likelihoods[num_states + states[2]];
                            like_sum += speciation_rate * likelihoods;
                        }
                    }
                    node_likelihood[num_states + i] = like_sum;
                }
                else
                {
                    // BiSSE/MuSSE-style merge: both daughters inherit state i
                    node_likelihood[num_states + i] = left_likelihoods[num_states + i] * right_likelihoods[num_states + i] * speciation_rates[i];
                }
            }
        }

        // calculate likelihood for this branch: integrate from the node's age
        // up to its parent's age
        double begin_age = node.getAge();
        double end_age = node.getParent().getAge();
        numericallyIntegrateProcess(node_likelihood, begin_age, end_age, true, false);

        // rescale the states to avoid numerical underflow: divide the
        // likelihood entries by their maximum and remember log(max)
        // NOTE(review): if all likelihood entries are 0, max stays 0 and the
        // division below yields NaN — presumably never happens in practice; verify.
        double max = 0.0;
        for (size_t i=0; i<num_states; ++i)
        {
            if ( node_likelihood[num_states+i] > max )
            {
                max = node_likelihood[num_states+i];
            }
        }
        for (size_t i=0; i<num_states; ++i)
        {
            node_likelihood[num_states+i] /= max;
        }
        scaling_factors[node_index][active_likelihood[node_index]] = log(max);
        // update the running total by swapping out the inactive buffer's old
        // contribution (index ^1) for the newly computed one
        total_scaling += scaling_factors[node_index][active_likelihood[node_index]] - scaling_factors[node_index][active_likelihood[node_index]^1];

        // store the likelihoods
        partial_likelihoods[node_index][active_likelihood[node_index]] = node_likelihood;
    }
}