/** * Perform the proposal. * * A Uniform-simplex proposal randomly changes some values of a simplex, although the other values * change too because of the renormalization. * First, some random indices are drawn. Then, the proposal draws a new somplex * u ~ Uniform(val[index] * alpha) * where alpha is the tuning parameter.The new value is set to u. * The simplex is then renormalized. * * \return The hastings ratio. */ double RootTimeSlideUniformProposal::doProposal( void ) { // Get random number generator RandomNumberGenerator* rng = GLOBAL_RNG; Tree& tau = variable->getValue(); // pick a random node which is not the root and neithor the direct descendant of the root TopologyNode* node = &tau.getRoot(); // we need to work with the times double my_age = node->getAge(); double child_Age = node->getChild( 0 ).getAge(); if ( child_Age < node->getChild( 1 ).getAge()) { child_Age = node->getChild( 1 ).getAge(); } // now we store all necessary values storedAge = my_age; // draw new ages and compute the hastings ratio at the same time double my_new_age = (origin->getValue() - child_Age) * rng->uniform01() + child_Age; // set the age node->setAge( my_new_age ); return 0.0; }
void AutocorrelatedBranchMatrixDistribution::recursiveSimulate(const TopologyNode& node, RbVector< RateMatrix > *values, const std::vector< double > &scaledParent) { // get the index size_t nodeIndex = node.getIndex(); // first we simulate our value RandomNumberGenerator* rng = GLOBAL_RNG; // do we keep our parents values? double u = rng->uniform01(); if ( u < changeProbability->getValue() ) { // change // draw a new value for the base frequencies std::vector<double> newParent = RbStatistics::Dirichlet::rv(scaledParent, *rng); std::vector<double> newScaledParent = newParent; // compute the new scaled parent std::vector<double>::iterator end = newScaledParent.end(); for (std::vector<double>::iterator it = newScaledParent.begin(); it != end; ++it) { (*it) *= alpha->getValue(); } RateMatrix_GTR rm = RateMatrix_GTR( newParent.size() ); RbPhylogenetics::Gtr::computeRateMatrix( exchangeabilityRates->getValue(), newParent, &rm ); uniqueBaseFrequencies.push_back( newParent ); uniqueMatrices.push_back( rm ); matrixIndex[nodeIndex] = uniqueMatrices.size()-1; values->insert(nodeIndex, rm); size_t numChildren = node.getNumberOfChildren(); if ( numChildren > 0 ) { for (size_t i = 0; i < numChildren; ++i) { const TopologyNode& child = node.getChild(i); recursiveSimulate(child,values,newScaledParent); } } } else { // no change size_t parentIndex = node.getParent().getIndex(); values->insert(nodeIndex, uniqueMatrices[ matrixIndex[ parentIndex ] ]); size_t numChildren = node.getNumberOfChildren(); if ( numChildren > 0 ) { for (size_t i = 0; i < numChildren; ++i) { const TopologyNode& child = node.getChild(i); recursiveSimulate(child,values,scaledParent); } } } }
size_t SpeciesTreeNodeSlideProposal::fillPreorderIndices(TopologyNode &node, size_t loc, std::vector<size_t> &indices)
{
    // Tips contribute nothing; only internal nodes are assigned a preorder slot.
    if ( node.isInternal() == true )
    {
        // slot left by the left subtree becomes this node's index
        size_t left_loc = fillPreorderIndices(node.getChild( 0 ), loc, indices);
        indices[node.getIndex()] = left_loc;

        // continue numbering through the right subtree
        loc = fillPreorderIndices(node.getChild( 1 ), left_loc + 1, indices);
    }

    return loc;
}
void MultivariateBrownianPhyloProcess::recursiveCorruptAll(const TopologyNode& from)
{
    // mark this node dirty, then do the same for its entire subtree
    dirtyNodes[from.getIndex()] = true;

    size_t nChildren = from.getNumberOfChildren();
    for (size_t i = 0; i < nChildren; ++i)
    {
        recursiveCorruptAll( from.getChild(i) );
    }
}
void MultivariateBrownianPhyloProcess::recursiveSimulate(const TopologyNode& from) { size_t index = from.getIndex(); if (from.isRoot()) { std::vector<double>& val = (*value)[index]; for (size_t i=0; i<getDim(); i++) { val[i] = 0; } } else { // x ~ normal(x_up, sigma^2 * branchLength) std::vector<double>& val = (*value)[index]; sigma->getValue().drawNormalSampleCovariance((*value)[index]); size_t upindex = from.getParent().getIndex(); std::vector<double>& upval = (*value)[upindex]; for (size_t i=0; i<getDim(); i++) { val[i] += upval[i]; } } // propagate forward size_t numChildren = from.getNumberOfChildren(); for (size_t i = 0; i < numChildren; ++i) { recursiveSimulate(from.getChild(i)); } }
void RealNodeContainer::recursiveClampAt(const TopologyNode& from, const ContinuousCharacterData* data, size_t l) { if (from.isTip()) { // get taxon index size_t index = from.getIndex(); std::string taxon = tree->getTipNames()[index]; size_t dataindex = data->getIndexOfTaxon(taxon); if (data->getCharacter(dataindex,l) != -1000) { (*this)[index] = data->getCharacter(dataindex,l); clampVector[index] = true; //std::cerr << "taxon : " << index << '\t' << taxon << " trait value : " << (*this)[index] << '\n'; } else { std::cerr << "taxon : " << taxon << " is missing for trait " << l+1 << '\n'; } } // propagate forward size_t numChildren = from.getNumberOfChildren(); for (size_t i = 0; i < numChildren; ++i) { recursiveClampAt(from.getChild(i),data,l); } }
void PhyloWhiteNoiseProcess::recursiveSimulate(const TopologyNode& from) { if (! from.isRoot()) { // get the index size_t index = from.getIndex(); // compute the variance along the branch double mean = 1.0; double stdev = sigma->getValue() / sqrt(from.getBranchLength()); double alpha = mean * mean / (stdev * stdev); double beta = mean / (stdev * stdev); // simulate a new Val RandomNumberGenerator* rng = GLOBAL_RNG; double v = RbStatistics::Gamma::rv( alpha,beta, *rng); // we store this val here (*value)[index] = v; } // simulate the val for each child (if any) size_t numChildren = from.getNumberOfChildren(); for (size_t i = 0; i < numChildren; ++i) { const TopologyNode& child = from.getChild(i); recursiveSimulate(child); } }
double BrownianPhyloProcess::recursiveLnProb( const TopologyNode& from ) { double lnProb = 0.0; size_t index = from.getIndex(); double val = (*value)[index]; if (! from.isRoot()) { // x ~ normal(x_up, sigma^2 * branchLength) size_t upindex = from.getParent().getIndex(); double standDev = sigma->getValue() * sqrt(from.getBranchLength()); double mean = (*value)[upindex] + drift->getValue() * from.getBranchLength(); lnProb += RbStatistics::Normal::lnPdf(val, standDev, mean); } // propagate forward size_t numChildren = from.getNumberOfChildren(); for (size_t i = 0; i < numChildren; ++i) { lnProb += recursiveLnProb(from.getChild(i)); } return lnProb; }
void AutocorrelatedLognormalRateBranchwiseVarDistribution::recursiveSimulate(const TopologyNode& node, double parentRate) { // get the index size_t nodeIndex = node.getIndex(); // compute the variance along the branch double scale = scaleValue->getValue(); double variance = sigma->getValue()[nodeIndex] * node.getBranchLength() * scale; double mu = log(parentRate) - (variance * 0.5); double stDev = sqrt(variance); // simulate a new rate RandomNumberGenerator* rng = GLOBAL_RNG; double nodeRate = RbStatistics::Lognormal::rv( mu, stDev, *rng ); // we store this rate here (*value)[nodeIndex] = nodeRate; // simulate the rate for each child (if any) size_t numChildren = node.getNumberOfChildren(); for (size_t i = 0; i < numChildren; ++i) { const TopologyNode& child = node.getChild(i); recursiveSimulate(child,nodeRate); } }
double AutocorrelatedLognormalRateBranchwiseVarDistribution::recursiveLnProb( const TopologyNode& n ) { // get the index size_t nodeIndex = n.getIndex(); double lnProb = 0.0; size_t numChildren = n.getNumberOfChildren(); double scale = scaleValue->getValue(); if ( numChildren > 0 ) { double parentRate = log( (*value)[nodeIndex] ); for (size_t i = 0; i < numChildren; ++i) { const TopologyNode& child = n.getChild(i); lnProb += recursiveLnProb(child); size_t childIndex = child.getIndex(); // compute the variance double variance = sigma->getValue()[childIndex] * child.getBranchLength() * scale; double childRate = (*value)[childIndex]; // the mean of the LN dist is parentRate = exp[mu + (variance / 2)], // where mu is the location param of the LN dist (see Kishino & Thorne 2001) double mu = parentRate - (variance * 0.5); double stDev = sqrt(variance); lnProb += RbStatistics::Lognormal::lnPdf(mu, stDev, childRate); } } return lnProb; }
void BrownianPhyloProcess::recursiveSimulate(const TopologyNode& from) { size_t index = from.getIndex(); if (! from.isRoot()) { // x ~ normal(x_up, sigma^2 * branchLength) size_t upindex = from.getParent().getIndex(); double standDev = sigma->getValue() * sqrt(from.getBranchLength()); double mean = (*value)[upindex] + drift->getValue() * from.getBranchLength(); // simulate the new Val RandomNumberGenerator* rng = GLOBAL_RNG; (*value)[index] = RbStatistics::Normal::rv( mean, standDev, *rng); } // propagate forward size_t numChildren = from.getNumberOfChildren(); for (size_t i = 0; i < numChildren; ++i) { recursiveSimulate(from.getChild(i)); } }
TopologyNode* SpeciesTreeNodeSlideProposal::mauReconstructSub(Tree &tree, size_t from, size_t to, std::vector<TopologyNode*> &order, std::vector<bool>&wasSwaped)
{
    // a single leaf slot: nothing to rebuild
    if ( from == to )
    {
        return order[2*from];
    }

    // find the oldest internal node among order[2*from+1], ..., order[2*to-1]
    size_t node_index = -1;   // sentinel; always overwritten because from < to here
    double highest = -1;
    for (size_t i = from; i < to; ++i)
    {
        double age = order[2 * i + 1]->getAge();
        if ( highest < age )
        {
            highest = age;
            node_index = i;
        }
    }

    TopologyNode* node   = order[2 * node_index + 1];
    TopologyNode& lchild = node->getChild( 0 );
    TopologyNode& rchild = node->getChild( 1 );

    // recursively rebuild the two halves on either side of the oldest node
    TopologyNode* lTargetChild = mauReconstructSub(tree, from, node_index, order, wasSwaped);
    TopologyNode* rTargetChild = mauReconstructSub(tree, node_index+1, to, order, wasSwaped);

    // honor the recorded left/right swap for this node
    if ( wasSwaped[node_index] )
    {
        TopologyNode* tmp = lTargetChild;
        lTargetChild = rTargetChild;
        rTargetChild = tmp;
    }

    // re-attach children only where they actually changed
    if ( &lchild != lTargetChild )
    {
        node->removeChild( &lchild );
        node->addChild( lTargetChild );
    }
    if ( &rchild != rTargetChild )
    {
        node->removeChild( &rchild );
        node->addChild( rTargetChild );
    }

    return node;
}
/** * Perform the proposal. * * A Uniform-simplex proposal randomly changes some values of a simplex, although the other values * change too because of the renormalization. * First, some random indices are drawn. Then, the proposal draws a new somplex * u ~ Uniform(val[index] * alpha) * where alpha is the tuning parameter.The new value is set to u. * The simplex is then renormalized. * * \return The hastings ratio. */ double NodeTimeSlideUniformProposal::doProposal( void ) { // Get random number generator RandomNumberGenerator* rng = GLOBAL_RNG; Tree& tau = variable->getValue(); // pick a random node which is not the root and neithor the direct descendant of the root TopologyNode* node; do { double u = rng->uniform01(); size_t index = size_t( std::floor(tau.getNumberOfNodes() * u) ); node = &tau.getNode(index); } while ( node->isRoot() || node->isTip() ); TopologyNode& parent = node->getParent(); // we need to work with the times double parent_age = parent.getAge(); double my_age = node->getAge(); double child_Age = node->getChild( 0 ).getAge(); if ( child_Age < node->getChild( 1 ).getAge()) { child_Age = node->getChild( 1 ).getAge(); } // now we store all necessary values storedNode = node; storedAge = my_age; // draw new ages and compute the hastings ratio at the same time double my_new_age = (parent_age-child_Age) * rng->uniform01() + child_Age; // set the age tau.getNode(node->getIndex()).setAge( my_new_age ); return 0.0; }
/** * Recursive call to attach ordered interior node times to the time tree psi. Call it initially with the * root of the tree. */ void HeterogeneousRateBirthDeath::attachTimes(Tree* psi, std::vector<TopologyNode *> &nodes, size_t index, const std::vector<double> &interiorNodeTimes, double originTime ) { if (index < num_taxa-1) { // Get the rng RandomNumberGenerator* rng = GLOBAL_RNG; // Randomly draw one node from the list of nodes size_t node_index = static_cast<size_t>( floor(rng->uniform01()*nodes.size()) ); // Get the node from the list TopologyNode* parent = nodes.at(node_index); psi->getNode( parent->getIndex() ).setAge( originTime - interiorNodeTimes[index] ); // Remove the randomly drawn node from the list nodes.erase(nodes.begin()+long(node_index)); // Add the left child if an interior node TopologyNode* leftChild = &parent->getChild(0); if ( !leftChild->isTip() ) { nodes.push_back(leftChild); } // Add the right child if an interior node TopologyNode* rightChild = &parent->getChild(1); if ( !rightChild->isTip() ) { nodes.push_back(rightChild); } // Recursive call to this function attachTimes(psi, nodes, index+1, interiorNodeTimes, originTime); } }
void RealNodeContainer::recursiveGetStats(const TopologyNode& from, double& e1, double& e2, int& n) const { double tmp = (*this)[from.getIndex()]; n++; e1 += tmp; e2 += tmp * tmp; // propagate forward size_t numChildren = from.getNumberOfChildren(); for (size_t i = 0; i < numChildren; ++i) { recursiveGetStats(from.getChild(i),e1,e2,n); } }
double MultivariateBrownianPhyloProcess::recursiveLnProb( const TopologyNode& from )
{
    // Log-probability of the multivariate Brownian values over the subtree
    // rooted at 'from', summed over all non-root branches.
    double lnProb = 0.0;
    size_t index = from.getIndex();
    std::vector<double> val = (*value)[index];

    if (! from.isRoot())
    {
        // NOTE(review): the dirty-node caching is disabled — the 'if (1)' forces
        // a full recomputation on every call; the intended condition is kept in
        // the comment below.
        if (1)
        {
            // if (dirtyNodes[index]) {

            // x ~ normal(x_up, sigma^2 * branchLength)

            size_t upindex = from.getParent().getIndex();
            std::vector<double> upval = (*value)[upindex];

            const MatrixReal& om = sigma->getValue().getInverse();

            // quadratic form (val - upval)^T * sigma^{-1} * (val - upval)
            double s2 = 0;
            for (size_t i = 0; i < getDim(); i++)
            {
                double tmp = 0;
                for (size_t j = 0; j < getDim(); j++)
                {
                    tmp += om[i][j] * (val[j] - upval[j]);
                }
                s2 += (val[i] - upval[i]) * tmp;
            }

            // Gaussian log-density up to an additive constant: the covariance is
            // sigma * branchLength, hence the log-determinant correction term.
            double logprob = 0;
            logprob -= 0.5 * s2 / from.getBranchLength();
            logprob -= 0.5 * (sigma->getValue().getLogDet() + sigma->getValue().getDim() * log(from.getBranchLength()));

            // cache the branch contribution and clear the dirty flag
            nodeLogProbs[index] = logprob;
            dirtyNodes[index] = false;
        }
        lnProb += nodeLogProbs[index];
    }

    // propagate forward: add the contributions of the subtrees
    size_t numChildren = from.getNumberOfChildren();
    for (size_t i = 0; i < numChildren; ++i)
    {
        lnProb += recursiveLnProb(from.getChild(i));
    }

    return lnProb;
}
double AutocorrelatedBranchMatrixDistribution::recursiveLnProb( const TopologyNode& n ) { // get the index size_t nodeIndex = n.getIndex(); double lnProb = 0.0; size_t numChildren = n.getNumberOfChildren(); if ( numChildren > 0 ) { std::vector<double> parent = (*value)[nodeIndex].getStationaryFrequencies(); std::vector<double>::iterator end = parent.end(); for (std::vector<double>::iterator it = parent.begin(); it != end; ++it) { (*it) *= alpha->getValue(); } for (size_t i = 0; i < numChildren; ++i) { const TopologyNode& child = n.getChild(i); lnProb += recursiveLnProb(child); size_t childIndex = child.getIndex(); // RateMatrix& rm = (*value)[childIndex]; // compare if the child has a different matrix if ( matrixIndex[nodeIndex] == matrixIndex[childIndex] ) { // no change -> just the probability of no change lnProb += log( 1.0 - changeProbability->getValue() ); } else { // change: // probability of change lnProb += log( changeProbability->getValue() ); const std::vector<double>& descendant = (*value)[childIndex].getStationaryFrequencies(); // const std::vector<double>& descendant = uniqueMatrices[ matrixIndex[childIndex] ].getStationaryFrequencies(); // probability of new descendant values double p = RbStatistics::Dirichlet::lnPdf(parent, descendant); lnProb += p; } } } return lnProb; }
void RealNodeContainer::recursiveGetTipValues(const TopologyNode& from, ContinuousCharacterData& nameToVal) const { if(from.isTip()) { double tmp = (*this)[from.getIndex()]; std::string name = tree->getTipNames()[from.getIndex()]; ContinuousTaxonData dataVec = ContinuousTaxonData(name); double contObs = tmp; dataVec.addCharacter( contObs ); nameToVal.addTaxonData( dataVec ); return; } // propagate forward size_t numChildren = from.getNumberOfChildren(); for (size_t i = 0; i < numChildren; ++i) { recursiveGetTipValues(from.getChild(i), nameToVal ); } }
std::string RealNodeContainer::recursiveGetNewick(const TopologyNode& from) const { std::ostringstream s; if (from.isTip()) { s << getTimeTree()->getTipNames()[from.getIndex()] << "_"; // std::cerr << from.getIndex() << '\t' << getTimeTree()->getTipNames()[from.getIndex()] << "_"; // std::cerr << (*this)[from.getIndex()] << '\n'; // exit(1); } else { s << "("; // propagate forward size_t numChildren = from.getNumberOfChildren(); for (size_t i = 0; i < numChildren; ++i) { s << recursiveGetNewick(from.getChild(i)); if (i < numChildren-1) { s << ","; } } s << ")"; } s << (*this)[from.getIndex()]; /* if (from.isTip() && (! isClamped(from.getIndex()))) { std::cerr << "leaf is not clamped\n"; // get taxon index size_t index = from.getIndex(); std::cerr << "index : " << index << '\n'; std::string taxon = tree->getTipNames()[index]; std::cerr << "taxon : " << index << '\t' << taxon << '\n'; std::cerr << " trait value : " << (*this)[index] << '\n'; exit(1); }*/ // if (!from.isRoot()) { s << ":"; s << getTimeTree()->getBranchLength(from.getIndex()); // } return s.str(); }
double PhyloWhiteNoiseProcess::recursiveLnProb(const TopologyNode &from) { double lnProb = 0.0; if (! from.isRoot()) { // compute the variance double mean = 1.0; double stdev = sigma->getValue() / sqrt(from.getBranchLength()); double alpha = mean * mean / (stdev * stdev); double beta = mean / (stdev * stdev); double v = (*value)[from.getIndex()]; lnProb += log( RbStatistics::Gamma::lnPdf(alpha,beta,v) ); } size_t numChildren = from.getNumberOfChildren(); for (size_t i = 0; i < numChildren; ++i) { const TopologyNode& child = from.getChild(i); lnProb += recursiveLnProb(child); } return lnProb; }
void StateDependentSpeciationExtinctionProcess::recursivelyDrawJointConditionalAncestralStates(const TopologyNode &node, std::vector<size_t>& startStates, std::vector<size_t>& endStates) { size_t node_index = node.getIndex(); if ( node.isTip() == true ) { const AbstractHomologousDiscreteCharacterData& data = static_cast<TreeDiscreteCharacterData*>(this->value)->getCharacterData(); const AbstractDiscreteTaxonData& taxon_data = data.getTaxonData( node.getName() ); const DiscreteCharacterState &char_state = taxon_data.getCharacter(0); // we need to treat ambiguous state differently if ( char_state.isAmbiguous() == false ) { endStates[node_index] = char_state.getStateIndex(); } else { // initialize the conditional likelihoods for this branch state_type branch_conditional_probs = std::vector<double>(2 * num_states, 0); size_t start_state = startStates[node_index]; branch_conditional_probs[ num_states + start_state ] = 1.0; // first calculate extinction likelihoods via a backward time pass double end_age = node.getParent().getAge(); numericallyIntegrateProcess(branch_conditional_probs, 0, end_age, true, true); // now calculate conditional likelihoods along branch in forward time end_age = node.getParent().getAge() - node.getAge(); numericallyIntegrateProcess(branch_conditional_probs, 0, end_age, false, false); double total_prob = 0.0; for (size_t i = 0; i < num_states; ++i) { if ( char_state.isMissingState() == true || char_state.isGapState() == true || char_state.isStateSet(i) == true ) { total_prob += branch_conditional_probs[i]; } } RandomNumberGenerator* rng = GLOBAL_RNG; size_t u = rng->uniform01() * total_prob; for (size_t i = 0; i < num_states; ++i) { if ( char_state.isMissingState() == true || char_state.isGapState() == true || char_state.isStateSet(i) == true ) { u -= branch_conditional_probs[i]; if ( u <= 0.0 ) { endStates[node_index] = i; break; } } } } } else { // sample characters by their probability conditioned on the branch's start state going to end 
states // initialize the conditional likelihoods for this branch state_type branch_conditional_probs = std::vector<double>(2 * num_states, 0); size_t start_state = startStates[node_index]; branch_conditional_probs[ num_states + start_state ] = 1.0; // first calculate extinction likelihoods via a backward time pass double end_age = node.getParent().getAge(); numericallyIntegrateProcess(branch_conditional_probs, 0, end_age, true, true); // now calculate conditional likelihoods along branch in forward time end_age = node.getParent().getAge() - node.getAge(); numericallyIntegrateProcess(branch_conditional_probs, 0, end_age, false, false); // TODO: if character mapping compute likelihoods for each time slice.... // double current_time_slice = floor(begin_age / dt); // bool computed_at_least_one = false; // // // first iterate forward along the branch subtending this node to get the // // probabilities of the end states conditioned on the start state // while (current_time_slice * dt >= end_age || !computed_at_least_one) // { // double begin_age_slice = current_time_slice * dt; // double end_age_slice = (current_time_slice + 1) * dt; // numericallyIntegrateProcess(branch_conditional_probs, begin_age_slice, end_age_slice, false); // // computed_at_least_one = true; // current_time_slice--; // } std::map<std::vector<unsigned>, double> event_map; std::vector<double> speciation_rates; if ( use_cladogenetic_events == true ) { // get cladogenesis event map (sparse speciation rate matrix) const DeterministicNode<MatrixReal>* cpn = static_cast<const DeterministicNode<MatrixReal>* >( cladogenesis_matrix ); const TypedFunction<MatrixReal>& tf = cpn->getFunction(); const AbstractCladogenicStateFunction* csf = dynamic_cast<const AbstractCladogenicStateFunction*>( &tf ); event_map = csf->getEventMap(); } else { speciation_rates = lambda->getValue(); } // get likelihoods of descendant nodes const TopologyNode &left = node.getChild(0); size_t left_index = left.getIndex(); state_type 
left_likelihoods = partial_likelihoods[left_index][active_likelihood[left_index]]; const TopologyNode &right = node.getChild(1); size_t right_index = right.getIndex(); state_type right_likelihoods = partial_likelihoods[right_index][active_likelihood[right_index]]; std::map<std::vector<unsigned>, double> sample_probs; double sample_probs_sum = 0.0; std::map<std::vector<unsigned>, double>::iterator it; // calculate probabilities for each state if ( use_cladogenetic_events == true ) { // iterate over each cladogenetic event possible // and initialize probabilities for each clado event for (it = event_map.begin(); it != event_map.end(); it++) { const std::vector<unsigned>& states = it->first; double speciation_rate = it->second; // we need to sample from the ancestor, left, and right states jointly, // so keep track of the probability of each clado event double prob = left_likelihoods[num_states + states[1]] * right_likelihoods[num_states + states[2]]; prob *= speciation_rate * branch_conditional_probs[num_states + states[0]]; sample_probs[ states ] = prob; sample_probs_sum += prob; } } else { for (size_t i = 0; i < num_states; i++) { double prob = left_likelihoods[num_states + i] * right_likelihoods[num_states + i] * speciation_rates[i]; prob *= branch_conditional_probs[num_states + i]; std::vector<unsigned> states = boost::assign::list_of(i)(i)(i); sample_probs[ states ] = prob; sample_probs_sum += prob; } } // finally, sample ancestor, left, and right character states from probs size_t a, l, r; if (sample_probs_sum == 0) { RandomNumberGenerator* rng = GLOBAL_RNG; size_t u = rng->uniform01() * sample_probs.size(); size_t v = 0; for (it = sample_probs.begin(); it != sample_probs.end(); it++) { if (u < v) { const std::vector<unsigned>& states = it->first; a = states[0]; l = states[1]; r = states[2]; endStates[node_index] = a; startStates[left_index] = l; startStates[right_index] = r; break; } v++; } } else { RandomNumberGenerator* rng = GLOBAL_RNG; double u = 
rng->uniform01() * sample_probs_sum; for (it = sample_probs.begin(); it != sample_probs.end(); it++) { u -= it->second; if (u < 0.0) { const std::vector<unsigned>& states = it->first; a = states[0]; l = states[1]; r = states[2]; endStates[node_index] = a; startStates[left_index] = l; startStates[right_index] = r; break; } } } // recurse towards tips recursivelyDrawJointConditionalAncestralStates(left, startStates, endStates); recursivelyDrawJointConditionalAncestralStates(right, startStates, endStates); } }
/**
 * Perform the move: slide a random internal node's age by a beta proposal and
 * compensate the adjacent branch rates so the rate * time products are kept,
 * then accept or reject via Metropolis-Hastings.
 *
 * NOTE(review): 'heat' and 'raiseLikelihoodOnly' are not used in this body —
 * confirm against the base-class contract.
 */
void RateAgeBetaShift::performMove( double heat, bool raiseLikelihoodOnly )
{
    // Get random number generator
    RandomNumberGenerator* rng = GLOBAL_RNG;

    TimeTree& tau = tree->getValue();

    // pick a random node which is neither the root nor a tip
    TopologyNode* node;
    size_t nodeIdx = 0;
    do {
        double u = rng->uniform01();
        nodeIdx = size_t( std::floor(tau.getNumberOfNodes() * u) );
        node = &tau.getNode(nodeIdx);
    } while ( node->isRoot() || node->isTip() );

    TopologyNode& parent = node->getParent();

    // we need to work with the times
    double parent_age = parent.getAge();
    double my_age = node->getAge();
    // lower bound of the sliding window: the age of the older child
    double child_Age = node->getChild( 0 ).getAge();
    if ( child_Age < node->getChild( 1 ).getAge())
    {
        child_Age = node->getChild( 1 ).getAge();
    }

    // store the current node, its age, and all adjacent rates so the move can
    // be undone on rejection
    storedNode = node;
    storedAge = my_age;

    storedRates[nodeIdx] = rates[nodeIdx]->getValue();
    for (size_t i = 0; i < node->getNumberOfChildren(); i++)
    {
        size_t childIdx = node->getChild(i).getIndex();
        storedRates[childIdx] = rates[childIdx]->getValue();
    }

    // draw the new age from a beta distribution centered on the node's current
    // relative position m within its admissible interval; delta is the tuning
    // parameter (larger delta -> tighter proposal)
    double m = (my_age-child_Age) / (parent_age-child_Age);
    double a = delta * m + 1.0;
    double b = delta * (1.0-m) + 1.0;
    double new_m = RbStatistics::Beta::rv(a, b, *rng);
    double my_new_age = (parent_age-child_Age) * new_m + child_Age;

    // compute the Hastings ratio: the proposal is asymmetric because the
    // backward move draws from a beta recentered on the new position
    double forward = RbStatistics::Beta::lnPdf(a, b, new_m);
    double new_a = delta * new_m + 1.0;
    double new_b = delta * (1.0-new_m) + 1.0;
    double backward = RbStatistics::Beta::lnPdf(new_a, new_b, m);

    // set the age
    tau.setAge( node->getIndex(), my_new_age );
    tree->touch();
    double treeProbRatio = tree->getLnProbabilityRatio();

    // rescale the rate on the branch above so rate * branch-length is preserved
    // NOTE(review): setValue is handed a raw 'new double' — presumably it takes
    // ownership; verify against the DagNode API.
    rates[nodeIdx]->setValue( new double((node->getParent().getAge() - my_age) * storedRates[nodeIdx] / (node->getParent().getAge() - my_new_age)));
    double ratesProbRatio = rates[nodeIdx]->getLnProbabilityRatio();

    // rescale the rates on the branches below in the same way
    for (size_t i = 0; i < node->getNumberOfChildren(); i++)
    {
        size_t childIdx = node->getChild(i).getIndex();
        rates[childIdx]->setValue( new double((my_age - node->getChild(i).getAge()) * storedRates[childIdx] / (my_new_age - node->getChild(i).getAge())));

        ratesProbRatio += rates[childIdx]->getLnProbabilityRatio();
    }

    // touch all downstream DAG nodes and accumulate their probability ratios
    // to validate the likelihood shortcut used above
    std::set<DagNode*> affected;
    tree->getAffectedNodes( affected );
    double lnProbRatio = 0;
    for (std::set<DagNode*>::iterator it = affected.begin(); it != affected.end(); ++it)
    {
        (*it)->touch();
        lnProbRatio += (*it)->getLnProbabilityRatio();
    }

    // sanity check: because the rates were rescaled to compensate the age
    // change, the combined ratio of the affected nodes should be ~zero
    if ( fabs(lnProbRatio) > 1E-6 )
    {
        // throw RbException("Likelihood shortcut computation failed in rate-age-proposal.");
        std::cout << "Likelihood shortcut computation failed in rate-age-proposal." << std::endl;
    }

    double hastingsRatio = backward - forward;
    double lnAcceptanceRatio = treeProbRatio + ratesProbRatio + hastingsRatio;

    if (lnAcceptanceRatio >= 0.0)
    {
        // certain acceptance: keep the new state everywhere
        numAccepted++;

        tree->keep();
        rates[nodeIdx]->keep();
        for (size_t i = 0; i < node->getNumberOfChildren(); i++)
        {
            size_t childIdx = node->getChild(i).getIndex();
            rates[childIdx]->keep();
        }
    }
    else if (lnAcceptanceRatio < -300.0)
    {
        // certain rejection (exp() would underflow): restore the old state
        reject();
        tree->restore();
        rates[nodeIdx]->restore();
        for (size_t i = 0; i < node->getNumberOfChildren(); i++)
        {
            size_t childIdx = node->getChild(i).getIndex();
            rates[childIdx]->restore();
        }
    }
    else
    {
        double r = exp(lnAcceptanceRatio);
        // Accept or reject the move
        double u = GLOBAL_RNG->uniform01();
        if (u < r)
        {
            numAccepted++;

            // keep the new state
            tree->keep();
            rates[nodeIdx]->keep();
            for (size_t i = 0; i < node->getNumberOfChildren(); i++)
            {
                size_t childIdx = node->getChild(i).getIndex();
                rates[childIdx]->keep();
            }
        }
        else
        {
            // restore the old state
            reject();
            tree->restore();
            rates[nodeIdx]->restore();
            for (size_t i = 0; i < node->getNumberOfChildren(); i++)
            {
                size_t childIdx = node->getChild(i).getIndex();
                rates[childIdx]->restore();
            }
        }
    }
}
std::vector<TopologyNode*> TreeNodeAgeUpdateProposal::getNodesInPopulation( Tree &tau, TopologyNode &n )
{
    // Collect all internal nodes of the gene tree 'tau' whose ages fall inside
    // the "population" of species-tree node 'n': older than one of n's
    // children, younger than n's parent.
    // I need all the oldest nodes/subtrees that have the same tips.
    // Those nodes need to be scaled too.

    // upper age bound of the population: the age of n's parent
    // (-1.0 is the sentinel for "n is the root, no upper bound")
    double max_age = -1.0;
    if ( n.isRoot() == false )
    {
        max_age = n.getParent().getAge();
    }
    // NOTE(review): when max_age stays -1.0, the window test
    // 'getAge() < max_age' below can never be true, so no node is collected
    // for a root 'n' — confirm callers never pass the root.

    // get all the taxa from the species tree that are descendants of n's first child
    double min_age_left = n.getChild(0).getAge();
    std::vector<TopologyNode*> speciesTaxa_left;
    TreeUtilities::getTaxaInSubtree( &n.getChild(0), speciesTaxa_left );

    // get all the individuals (gene-tree tips) belonging to those species
    std::set<TopologyNode*> individualTaxa_left;
    for (size_t i = 0; i < speciesTaxa_left.size(); ++i)
    {
        const std::string &name = speciesTaxa_left[i]->getName();
        std::vector<TopologyNode*> ind = tau.getTipNodesWithSpeciesName( name );
        for (size_t j = 0; j < ind.size(); ++j)
        {
            individualTaxa_left.insert( ind[j] );
        }
    }

    // create the set of the nodes within this population
    std::set<TopologyNode*> nodesInPopulationSet;

    // climb from the tips towards the root, collecting every internal node
    // whose age lies strictly inside (min_age_left, max_age)
    while ( individualTaxa_left.empty() == false )
    {
        // take (and remove) an arbitrary element from the worklist
        std::set<TopologyNode*>::iterator it = individualTaxa_left.begin();
        TopologyNode *geneNode = *it;
        individualTaxa_left.erase( it );

        // collect this node if it is an internal node inside the age window
        if ( geneNode->getAge() > min_age_left && geneNode->getAge() < max_age && geneNode->isTip() == false )
        {
            // add this node if it is within the age of our population
            nodesInPopulationSet.insert( geneNode );
        }

        // keep climbing while the parent is still below the upper bound
        if ( geneNode->isRoot() == false && ( max_age == -1.0 || max_age > geneNode->getParent().getAge() ) )
        {
            // push the parent to our current list
            individualTaxa_left.insert( &geneNode->getParent() );
        }
    }

    // same procedure for n's second child:
    // get all the taxa from the species tree that are descendants of node i
    double min_age_right = n.getChild(1).getAge();
    std::vector<TopologyNode*> speciesTaxa_right;
    TreeUtilities::getTaxaInSubtree( &n.getChild(1), speciesTaxa_right );

    // get all the individuals
    std::set<TopologyNode*> individualTaxa_right;
    for (size_t i = 0; i < speciesTaxa_right.size(); ++i)
    {
        const std::string &name = speciesTaxa_right[i]->getName();
        std::vector<TopologyNode*> ind = tau.getTipNodesWithSpeciesName( name );
        for (size_t j = 0; j < ind.size(); ++j)
        {
            individualTaxa_right.insert( ind[j] );
        }
    }

    // now go through all nodes in the gene
    while ( individualTaxa_right.empty() == false )
    {
        // take (and remove) an arbitrary element from the worklist
        std::set<TopologyNode*>::iterator it = individualTaxa_right.begin();
        TopologyNode *geneNode = *it;
        individualTaxa_right.erase( it );

        // collect this node if it is an internal node inside the age window
        if ( geneNode->getAge() > min_age_right && geneNode->getAge() < max_age && geneNode->isTip() == false )
        {
            // add this node if it is within the age of our population
            nodesInPopulationSet.insert( geneNode );
        }

        // keep climbing while the parent is still below the upper bound
        if ( geneNode->isRoot() == false && ( max_age == -1.0 || max_age > geneNode->getParent().getAge() ) )
        {
            // push the parent to our current list
            individualTaxa_right.insert( &geneNode->getParent() );
        }
    }

    // convert the set into a vector
    std::vector<TopologyNode*> nodesInPopulation;
    for (std::set<TopologyNode*>::iterator it = nodesInPopulationSet.begin(); it != nodesInPopulationSet.end(); ++it)
    {
        nodesInPopulation.push_back( *it );
    }

    return nodesInPopulation;
}
/**
 * Perform the proposal.
 *
 * Slides the age of a randomly chosen internal, non-root species-tree node
 * uniformly within its admissible interval, and linearly rescales the ages of
 * the gene-tree nodes that live in the corresponding population. The gene-tree
 * rescaling makes the proposal asymmetric.
 *
 * \return The log Hastings ratio.
 */
double TreeNodeAgeUpdateProposal::doProposal( void )
{
    // Get random number generator
    RandomNumberGenerator* rng = GLOBAL_RNG;

    Tree& tau = speciesTree->getValue();

    // pick a random node which is neither the root nor a tip
    TopologyNode* node;
    do {
        double u = rng->uniform01();
        size_t index = size_t( std::floor(tau.getNumberOfNodes() * u) );
        node = &tau.getNode(index);
    } while ( node->isRoot() || node->isTip() );

    TopologyNode& parent = node->getParent();

    // we need to work with the times
    double parent_age = parent.getAge();
    double my_age = node->getAge();
    // lower bound of the sliding window: the age of the older child
    double child_Age = node->getChild( 0 ).getAge();
    if ( child_Age < node->getChild( 1 ).getAge())
    {
        child_Age = node->getChild( 1 ).getAge();
    }

    // now we store all necessary values so the move can be undone
    storedNode = node;
    storedAge = my_age;

    // draw the new age uniformly in (child_Age, parent_age)
    double my_new_age = (parent_age-child_Age) * rng->uniform01() + child_Age;

    // Sebastian: This is for debugging to test if the proposal's acceptance rate is 1.0 as it should be!
    // my_new_age = my_age;

    // counters of rescaled gene-tree nodes, needed for the Hastings ratio
    int upslideNodes = 0;
    int downslideNodes = 0;
    for ( size_t i=0; i<geneTrees.size(); ++i )
    {
        // get the i-th gene tree
        Tree& geneTree = geneTrees[i]->getValue();

        // all gene-tree nodes living in the population defined by 'node'
        std::vector<TopologyNode*> nodes = getNodesInPopulation(geneTree, *node );

        for (size_t j=0; j<nodes.size(); ++j)
        {
            double a = nodes[j]->getAge();
            double new_a = a;
            if ( a > my_age )
            {
                // node above the slid age: rescale its distance to the parent age
                ++upslideNodes;
                new_a = parent_age - (parent_age - my_new_age)/(parent_age - my_age) * (parent_age - a);
            }
            else
            {
                // node below the slid age: rescale its distance to the child age
                ++downslideNodes;
                new_a = child_Age + (my_new_age - child_Age)/(my_age - child_Age) * (a - child_Age);
            }

            // set the new age of this gene tree node
            geneTree.getNode( nodes[j]->getIndex() ).setAge( new_a );
        }

        // Sebastian: This is only for debugging. It makes the code slower. Hopefully it is not necessary anymore.
        // geneTrees[i]->touch( true );
    }

    // Sebastian: We need to work on a mechanism to make these proposal safe for non-ultrametric trees!
    //    if (min_age != 0.0)
    //    {
    //        for (size_t i = 0; i < tau.getNumberOfTips(); i++)
    //        {
    //            if (tau.getNode(i).getAge() < 0.0)
    //            {
    //                return RbConstants::Double::neginf;
    //            }
    //        }
    //    }

    // set the age of the species tree node
    tau.getNode( node->getIndex() ).setAge( my_new_age );

    // compute the Hastings ratio: each rescaled gene-tree node contributes the
    // log of its interval's scaling factor (the Jacobian of the linear map)
    double lnHastingsratio = upslideNodes * log( (parent_age - my_new_age)/(parent_age - my_age) ) + downslideNodes * log( (my_new_age - child_Age)/(my_age - child_Age) );

    return lnHastingsratio;
}
/**
 * Perform the move.
 *
 * Re-draws the age of a random internal node from a Beta distribution centered
 * on its current relative position between its older child and its parent, and
 * compensates by rescaling the rates on the adjacent branches so that each
 * branch's (rate * duration) product is preserved. The move computes its own
 * acceptance probability and keeps/restores the affected DAG nodes itself.
 *
 * \param lHeat  the likelihood heat (power) applied to the probability ratio
 * \param pHeat  the posterior/chain heat applied to the probability ratio
 */
void RateAgeBetaShift::performMcmcMove( double lHeat, double pHeat )
{
    // Get random number generator
    RandomNumberGenerator* rng = GLOBAL_RNG;

    Tree& tau = tree->getValue();
    RbOrderedSet<DagNode*> affected;
    tree->getAffectedNodes( affected );

    // occasionally (p = 0.001) verify that incremental likelihood shortcuts
    // agree with a full recomputation; see the consistency check further below
    double oldLnLike = 0.0;
    bool checkLikelihoodShortcuts = rng->uniform01() < 0.001;
    if ( checkLikelihoodShortcuts == true )
    {
        for (RbOrderedSet<DagNode*>::iterator it = affected.begin(); it != affected.end(); ++it)
        {
            (*it)->touch();
            oldLnLike += (*it)->getLnProbability();
        }
    }

    // pick a random node which is neither the root nor a tip
    TopologyNode* node;
    size_t nodeIdx = 0;
    do {
        // randomly draw a node index and look the node up
        double u = rng->uniform01();
        nodeIdx = size_t( std::floor(tau.getNumberOfNodes() * u) );
        node = &tau.getNode(nodeIdx);
    } while ( node->isRoot() || node->isTip() );

    TopologyNode& parent = node->getParent();

    // we need to work with the times
    double parent_age = parent.getAge();
    double my_age = node->getAge();
    // child_Age is the age of the OLDER child: the lower bound for the new age
    double child_Age = node->getChild( 0 ).getAge();
    if ( child_Age < node->getChild( 1 ).getAge())
    {
        child_Age = node->getChild( 1 ).getAge();
    }

    // now we store all necessary values (for restoring on rejection)
    storedNode = node;
    storedAge = my_age;
    storedRates[nodeIdx] = rates[nodeIdx]->getValue();
    for (size_t i = 0; i < node->getNumberOfChildren(); i++)
    {
        size_t childIdx = node->getChild(i).getIndex();
        storedRates[childIdx] = rates[childIdx]->getValue();
    }

    // draw new ages and compute the hastings ratio at the same time:
    // m is the current relative position of the node in (child_Age, parent_age);
    // the new relative position is Beta(delta*m+1, delta*(1-m)+1) distributed,
    // where delta is the tuning parameter
    double m = (my_age-child_Age) / (parent_age-child_Age);
    double a = delta * m + 1.0;
    double b = delta * (1.0-m) + 1.0;
    double new_m = RbStatistics::Beta::rv(a, b, *rng);
    double my_new_age = (parent_age-child_Age) * new_m + child_Age;

    // compute the Hastings ratio (forward and backward Beta proposal densities)
    double forward = RbStatistics::Beta::lnPdf(a, b, new_m);
    double new_a = delta * new_m + 1.0;
    double new_b = delta * (1.0-new_m) + 1.0;
    double backward = RbStatistics::Beta::lnPdf(new_a, new_b, m);

    // set the age
    tau.getNode(nodeIdx).setAge( my_new_age );

    // touch the tree so that the likelihoods are getting stored
    tree->touch();

    // get the probability ratio of the tree
    double treeProbRatio = tree->getLnProbabilityRatio();

    // set the rates: rescale the rate on the branch above the node so that
    // rate * branch-duration stays constant under the new age
    double pa = node->getParent().getAge();
    double my_new_rate =(pa - my_age) * storedRates[nodeIdx] / (pa - my_new_age);

    // now we set the new value
    // this will automatically call a touch
    rates[nodeIdx]->setValue( new double( my_new_rate ) );

    // get the probability ratio of the new rate
    double ratesProbRatio = rates[nodeIdx]->getLnProbabilityRatio();

    // likewise rescale the rate of every child branch
    for (size_t i = 0; i < node->getNumberOfChildren(); i++)
    {
        size_t childIdx = node->getChild(i).getIndex();
        double a = node->getChild(i).getAge();
        double child_new_rate = (my_age - a) * storedRates[childIdx] / (my_new_age - a);

        // now we set the new value
        // this will automatically call a touch
        rates[childIdx]->setValue( new double( child_new_rate ) );

        // get the probability ratio of the new rate
        ratesProbRatio += rates[childIdx]->getLnProbabilityRatio();
    }

    // consistency check: with the compensating rate rescaling the total
    // probability ratio of the affected nodes should be (numerically) zero
    if ( checkLikelihoodShortcuts == true )
    {
        double lnProbRatio = 0;
        double newLnLike = 0;
        for (RbOrderedSet<DagNode*>::iterator it = affected.begin(); it != affected.end(); ++it)
        {
            double tmp = (*it)->getLnProbabilityRatio();
            lnProbRatio += tmp;
            newLnLike += (*it)->getLnProbability();
        }

        if ( fabs(lnProbRatio) > 1E-8 )
        {
            // recompute once more before failing (aids debugging with a breakpoint here)
            double lnProbRatio2 = 0;
            double newLnLike2 = 0;
            for (RbOrderedSet<DagNode*>::iterator it = affected.begin(); it != affected.end(); ++it)
            {
                double tmp2 = (*it)->getLnProbabilityRatio();
                lnProbRatio2 += tmp2;
                newLnLike2 += (*it)->getLnProbability();
            }

            throw RbException("Likelihood shortcut computation failed in rate-age-proposal.");
        }
    }

    // Metropolis-Hastings acceptance: heated probability ratio plus Hastings ratio
    double hastingsRatio = backward - forward;
    double ln_acceptance_ratio = lHeat * pHeat * (treeProbRatio + ratesProbRatio) + hastingsRatio;

    if (ln_acceptance_ratio >= 0.0)
    {
        // certain accept: keep the new tree and rates
        numAccepted++;

        tree->keep();
        rates[nodeIdx]->keep();
        for (size_t i = 0; i < node->getNumberOfChildren(); i++)
        {
            size_t childIdx = node->getChild(i).getIndex();
            rates[childIdx]->keep();
        }
    }
    else if (ln_acceptance_ratio < -300.0)
    {
        // acceptance probability is effectively zero (exp would underflow): reject
        reject();
        tree->restore();
        rates[nodeIdx]->restore();
        for (size_t i = 0; i < node->getNumberOfChildren(); i++)
        {
            size_t childIdx = node->getChild(i).getIndex();
            rates[childIdx]->restore();
        }
    }
    else
    {
        double r = exp(ln_acceptance_ratio);
        // Accept or reject the move
        double u = GLOBAL_RNG->uniform01();
        if (u < r)
        {
            numAccepted++;

            //keep
            tree->keep();
            rates[nodeIdx]->keep();
            for (size_t i = 0; i < node->getNumberOfChildren(); i++)
            {
                size_t childIdx = node->getChild(i).getIndex();
                rates[childIdx]->keep();
            }
        }
        else
        {
            // reject and restore the stored tree/rate values
            reject();
            tree->restore();
            rates[nodeIdx]->restore();
            for (size_t i = 0; i < node->getNumberOfChildren(); i++)
            {
                size_t childIdx = node->getChild(i).getIndex();
                rates[childIdx]->restore();
            }
        }
    }

}
/** Perform the move */ double GibbsPruneAndRegraft::performSimpleMove( void ) { // Get random number generator RandomNumberGenerator* rng = GLOBAL_RNG; TimeTree& tau = variable->getValue(); // potential affected nodes for likelihood computation std::set<DagNode *> affected; variable->getAffectedNodes( affected ); double backwardLikelihood = variable->getLnProbability(); for (std::set<DagNode*>::const_iterator it = affected.begin(); it != affected.end(); ++it) { backwardLikelihood += (*it)->getLnProbability(); } int offset = (int) -backwardLikelihood; double backward = exp(backwardLikelihood + offset); // pick a random node which is not the root and neithor the direct descendant of the root TopologyNode* node; do { double u = rng->uniform01(); size_t index = size_t( std::floor(tau.getNumberOfNodes() * u) ); node = &tau.getNode(index); } while ( node->isRoot() || node->getParent().isRoot() ); TopologyNode* parent = &node->getParent(); TopologyNode& grandparent = parent->getParent(); TopologyNode& brother = parent->getChild( 0 ); // check if we got the correct child if ( &brother == node ) { brother = parent->getChild( 1 ); } // collect the possible reattachement points std::vector<TopologyNode*> new_brothers; findNewBrothers(new_brothers, *parent, &tau.getRoot()); std::vector<double> weights = std::vector<double>(new_brothers.size(), 0.0); double sumOfWeights = 0.0; for (size_t i = 0; i<new_brothers.size(); ++i) { // get the new brother TopologyNode* newBro = new_brothers[i]; // do the proposal TopologyNode *newGrandparent = pruneAndRegraft(&brother, newBro, parent, grandparent); // flag for likelihood recomputation variable->touch(); // compute the likelihood of the new value double priorRatio = variable->getLnProbability(); double likelihoodRatio = 0.0; for (std::set<DagNode*>::const_iterator it = affected.begin(); it != affected.end(); ++it) { likelihoodRatio += (*it)->getLnProbability(); } weights[i] = exp(priorRatio + likelihoodRatio + offset); sumOfWeights += 
weights[i]; // undo proposal pruneAndRegraft(newBro, &brother, parent, *newGrandparent); // restore the previous likelihoods; variable->restore(); } if (sumOfWeights <= 1E-100) { // hack // the proposals have such a small likelihood that they can be neglected // throw new OperatorFailedException("Couldn't find another proposal with a decent likelihood."); return 0.0; } double ran = rng->uniform01() * sumOfWeights; size_t index = 0; while (ran > 0.0) { ran -= weights[index]; index++; } index--; TopologyNode* newBro = new_brothers[index]; // now we store all necessary values storedBrother = &brother; storedNewBrother = newBro; pruneAndRegraft(&brother, newBro, parent, grandparent); double forward = weights[index]; double forwardProb = (forward / sumOfWeights); double backwardProb = (backward / (sumOfWeights - forward + backward)); double hastingsRatio = log(backwardProb / forwardProb); return hastingsRatio; }
/**
 * Recursively sample joint ancestral character states down the tree.
 *
 * For a tip the end state is read directly from the observed character data.
 * For an internal node the branch-conditional state probabilities are obtained
 * by integrating the observed-state ODE backwards in time slices along the
 * subtending branch, then the ancestor/left/right state triplet is sampled
 * jointly from the cladogenetic event probabilities, and the recursion
 * continues into both children.
 *
 * \param node         the node whose end state (and children's start states) to sample
 * \param startStates  per-node-index start states; startStates[node] must be set by the caller
 * \param endStates    per-node-index end states, filled in by this function
 */
void CharacterDependentCladoBirthDeathProcess::recursivelyDrawJointConditionalAncestralStates(const TopologyNode &node, std::vector<size_t>& startStates, std::vector<size_t>& endStates)
{
    size_t node_index = node.getIndex();

    if ( node.isTip() == true )
    {
        // tips are observed: take the state index straight from the character data
        const AbstractHomologousDiscreteCharacterData& data = static_cast<TreeDiscreteCharacterData*>(this->value)->getCharacterData();
        const AbstractDiscreteTaxonData& taxon_data = data.getTaxonData( node.getName() );
        endStates[node_index] = taxon_data.getCharacter(0).getStateIndex();
    }
    else
    {
        // NOTE(review): partial likelihoods are copied here, not referenced — presumably
        // cheap enough, but a const reference would avoid the copies; confirm state_type size.
        const TopologyNode &left = node.getChild(0);
        size_t left_index = left.getIndex();
        state_type left_likelihoods = partial_likelihoods[left_index];
        const TopologyNode &right = node.getChild(1);
        size_t right_index = right.getIndex();
        state_type right_likelihoods = partial_likelihoods[right_index];

        // sample characters by their probability conditioned on the branch's start state going to end states
        // layout: first num_states entries hold extinction probs, the second
        // num_states entries hold the observed-state probabilities
        state_type branch_conditional_probs = std::vector<double>(2 * num_states, 0);
        size_t start_state = startStates[node_index];
        branch_conditional_probs[ num_states + start_state ] = 1.0;

        // discretize the branch into time slices of width dt
        double dt = root_age->getValue() / NUM_TIME_SLICES;
        double endAge = node.getAge();
        double beginAge = node.getParent().getAge();
        double current_time_slice = floor(beginAge / dt);
        bool computed_at_least_one = false;

        // get cladogenesis event map (sparse speciation rate matrix)
        const DeterministicNode<MatrixReal>* cpn = static_cast<const DeterministicNode<MatrixReal>* >( cladogenesis_matrix );
        const TypedFunction<MatrixReal>& tf = cpn->getFunction();
        const AbstractCladogenicStateFunction* csf = dynamic_cast<const AbstractCladogenicStateFunction*>( &tf );
        std::map<std::vector<unsigned>, double> eventMap = csf->getEventMap();

        // first iterate forward along the branch subtending this node to get the
        // probabilities of the end states conditioned on the start state
        // (the '|| !computed_at_least_one' guarantees at least one integration step)
        while (current_time_slice * dt >= endAge || !computed_at_least_one)
        {
            // populate pre-computed extinction probs into branch_conditional_probs
            if (current_time_slice > 0)
            {
                for (size_t i = 0; i < num_states; i++)
                {
                    branch_conditional_probs[i] = extinction_probabilities[current_time_slice - 1][i];
                }
            }

            // integrate the observed-state ODE across this one time slice
            CDCladoSEObserved ode = CDCladoSEObserved(extinction_rates, &Q->getValue(), eventMap, rate->getValue());
            boost::numeric::odeint::bulirsch_stoer< state_type > stepper(1E-8, 0.0, 0.0, 0.0);
            boost::numeric::odeint::integrate_adaptive( stepper, ode , branch_conditional_probs , current_time_slice * dt , (current_time_slice + 1) * dt, dt );

            computed_at_least_one = true;
            current_time_slice--;
        }

        std::map<std::vector<unsigned>, double> sample_probs;
        double sample_probs_sum = 0.0;
        std::map<std::vector<unsigned>, double>::iterator it;

        // iterate over each cladogenetic event possible
        // (states[0] = ancestor state, states[1] = left daughter, states[2] = right daughter)
        for (it = eventMap.begin(); it != eventMap.end(); it++)
        {
            const std::vector<unsigned>& states = it->first;
            double speciation_rate = it->second;
            sample_probs[ states ] = 0.0;

            // we need to sample from the ancestor, left, and right states jointly,
            // so keep track of the probability of each clado event
            double prob = left_likelihoods[num_states + states[1]] * right_likelihoods[num_states + states[2]];
            prob *= speciation_rate * branch_conditional_probs[num_states + states[0]];
            sample_probs[ states ] += prob;
            sample_probs_sum += prob;
        }

        // finally, sample ancestor, left, and right character states from probs
        // (inverse-CDF sampling over the event map)
        size_t a, l, r;
        RandomNumberGenerator* rng = GLOBAL_RNG;
        double u = rng->uniform01() * sample_probs_sum;
        for (it = sample_probs.begin(); it != sample_probs.end(); it++)
        {
            u -= it->second;
            if (u < 0.0)
            {
                const std::vector<unsigned>& states = it->first;
                a = states[0];
                l = states[1];
                r = states[2];
                endStates[node_index] = a;
                startStates[left_index] = l;
                startStates[right_index] = r;
                break;
            }
        }

        // recurse towards tips
        recursivelyDrawJointConditionalAncestralStates(left, startStates, endStates);
        recursivelyDrawJointConditionalAncestralStates(right, startStates, endStates);
    }
}
/**
 * Recursively compute the REML (independent contrasts) log-likelihood for the
 * subtree rooted at the given node.
 *
 * Children are combined pairwise: multifurcations are handled by folding child
 * j into the running parent estimate, where for j == 1 the "left" side is the
 * first child and for j > 1 the "left" side is the node's accumulated estimate
 * itself (leftIndex == nodeIndex, zero extra branch length).
 *
 * \param node       the node whose partial results to (re)compute
 * \param nodeIndex  the node's index into the per-node buffers
 */
void PhyloBrownianProcessREML::recursiveComputeLnProbability( const TopologyNode &node, size_t nodeIndex )
{

    // check for recomputation: tips hold observations and are never recomputed
    if ( node.isTip() == false && dirtyNodes[nodeIndex] )
    {
        // mark as computed
        dirtyNodes[nodeIndex] = false;

        // per-site partial log-likelihoods and contrast means for this node
        std::vector<double> &p_node = this->partialLikelihoods[this->activeLikelihood[nodeIndex]][nodeIndex];
        std::vector<double> &mu_node = this->contrasts[this->activeLikelihood[nodeIndex]][nodeIndex];

        // get the number of children
        size_t num_children = node.getNumberOfChildren();

        for (size_t j = 1; j < num_children; ++j)
        {
            // by default fold child j into this node's accumulated estimate ...
            size_t leftIndex = nodeIndex;
            const TopologyNode *left = &node;
            if ( j == 1 )
            {
                // ... except on the first pass, where the "left" side is child 0
                left = &node.getChild(0);
                leftIndex = left->getIndex();
                recursiveComputeLnProbability( *left, leftIndex );
            }

            const TopologyNode &right = node.getChild(j);
            size_t rightIndex = right.getIndex();
            recursiveComputeLnProbability( right, rightIndex );

            const std::vector<double> &p_left  = this->partialLikelihoods[this->activeLikelihood[leftIndex]][leftIndex];
            const std::vector<double> &p_right = this->partialLikelihoods[this->activeLikelihood[rightIndex]][rightIndex];

            // get the per node and site contrasts
            const std::vector<double> &mu_left  = this->contrasts[this->activeLikelihood[leftIndex]][leftIndex];
            const std::vector<double> &mu_right = this->contrasts[this->activeLikelihood[rightIndex]][rightIndex];

            // get the propagated uncertainties
            double delta_left  = this->contrastUncertainty[this->activeLikelihood[leftIndex]][leftIndex];
            double delta_right = this->contrastUncertainty[this->activeLikelihood[rightIndex]][rightIndex];

            // get the scaled branch lengths (zero on the left when folding into
            // the node's own accumulated estimate, i.e. j > 1)
            double v_left = 0;
            if ( j == 1 )
            {
                v_left = this->computeBranchTime(leftIndex, left->getBranchLength());
            }
            double v_right = this->computeBranchTime(rightIndex, right.getBranchLength());

            // add the propagated uncertainty to the branch lengths
            double t_left  = v_left + delta_left;
            double t_right = v_right + delta_right;

            // set delta_node = (t_l*t_r)/(t_l+t_r);
            this->contrastUncertainty[this->activeLikelihood[nodeIndex]][nodeIndex] = (t_left*t_right) / (t_left+t_right);

            double stdev = sqrt(t_left+t_right);
            for (int i=0; i<this->numSites; i++)
            {
                // weighted average of the child means, weights inverse to branch length
                mu_node[i] = (mu_left[i]*t_right + mu_right[i]*t_left) / (t_left+t_right);

                // get the site specific rate of evolution
                double standDev = this->computeSiteRate(i) * stdev;

                // compute the contrasts for this site and node
                double contrast = mu_left[i] - mu_right[i];

                // compute the probability for the contrasts at this node:
                // each contrast is Normal(0, standDev) under Brownian motion
                double lnl_node = RbStatistics::Normal::lnPdf(0, standDev, contrast);

                // sum up the probabilities of the contrasts
                p_node[i] = lnl_node + p_left[i] + p_right[i];

            } // end for-loop over all sites

        } // end for-loop over all children

    } // end if we need to compute something for this node.

}
/**
 * Compute the log-transformed probability of the current value under the current parameter values.
 *
 * Evaluates the fossilized birth-death density of the tree's node ages given the
 * speciation (lambda), extinction (mu), fossil-sampling (psi), and extant-sampling
 * (rho) parameters, conditioning either on the origin or on the root.
 *
 * \return The log-probability density.
 */
double ConstantRateFossilizedBirthDeathProcess::computeLnProbabilityTimes( void ) const
{
    double lnProbTimes = 0.0;
    double process_time = getOriginTime();
    size_t num_initial_lineages = 0;
    TopologyNode* root = &value->getRoot();

    if (useOrigin)
    {
        // If we are conditioning on survival from the origin,
        // then we must divide by 2 the log survival probability computed by AbstractBirthDeathProcess
        // TODO: Generalize AbstractBirthDeathProcess to allow conditioning on the origin
        if ( condition == "survival" )
        {
            lnProbTimes += log( pSurvival(0,process_time) );
        }

        // the process starts with a single lineage at the origin
        num_initial_lineages = 1;
    }
    // if conditioning on root, root node must be a "true" bifurcation event
    else
    {
        if (root->getChild(0).isSampledAncestor() || root->getChild(1).isSampledAncestor())
            return RbConstants::Double::neginf;

        // conditioning on the root: two initial lineages
        num_initial_lineages = 2;
    }

    // variable declarations and initialization
    double birth_rate = lambda->getValue();
    double death_rate = mu->getValue();
    double fossil_rate = psi->getValue();
    double sampling_prob = rho->getValue();

    // get helper variables
    // NOTE(review): c1/c2 appear to be the standard constants of the
    // constant-rate fossilized birth-death density (cf. Stadler 2010) used by
    // lnQ/pZero/pHatZero — confirm against those helpers' definitions.
    double a = birth_rate - death_rate - fossil_rate;
    double c1 = std::fabs(sqrt(a * a + 4 * birth_rate * fossil_rate));
    double c2 = -(a - 2 * birth_rate * sampling_prob) / c1;

    // get node/time variables
    size_t num_nodes = value->getNumberOfNodes();

    // classify nodes
    int num_sampled_ancestors = 0;
    int num_fossil_taxa = 0;
    int num_extant_taxa = 0;
    int num_internal_nodes = 0;

    std::vector<double> fossil_tip_ages = std::vector<double>();
    std::vector<double> internal_node_ages = std::vector<double>();
    for (size_t i = 0; i < num_nodes; i++)
    {
        const TopologyNode& n = value->getNode( i );

        if ( n.isTip() && n.isFossil() && n.isSampledAncestor() )
        {
            // node is sampled ancestor
            num_sampled_ancestors++;
        }
        else if ( n.isTip() && n.isFossil() && !n.isSampledAncestor() )
        {
            // node is fossil leaf
            num_fossil_taxa++;
            fossil_tip_ages.push_back( n.getAge() );
        }
        else if ( n.isTip() && !n.isFossil() )
        {
            // node is extant leaf
            num_extant_taxa++;
        }
        else if ( n.isInternal() && !n.getChild(0).isSampledAncestor() && !n.getChild(1).isSampledAncestor() )
        {
            // node is bifurcation event (a "true" node)
            internal_node_ages.push_back( n.getAge() );
            num_internal_nodes++;
        }
    }

    // add the log probability for the fossilization events;
    // a zero fossil-sampling rate is inconsistent with observed fossils
    if (fossil_rate == 0.0 && num_fossil_taxa + num_sampled_ancestors > 0)
    {
        throw RbException("The sampling rate is zero, but the tree has sampled fossils.");
    }
    else if (fossil_rate > 0.0)
    {
        lnProbTimes += (num_fossil_taxa + num_sampled_ancestors) * log( fossil_rate );
    }

    // add the log probability for sampling the extant taxa
    lnProbTimes += num_extant_taxa * log( sampling_prob );

    // add the log probability of the initial sequences
    lnProbTimes += lnQ(process_time, c1, c2) - num_initial_lineages * log(1.0 - pHatZero(process_time)) - log(birth_rate);

    // contribution of each "true" bifurcation event
    for(size_t i=0; i<internal_node_ages.size(); i++)
    {
        double t = internal_node_ages[i];
        lnProbTimes += log(2.0 * birth_rate) + lnQ(t, c1, c2);
    }

    // contribution of each fossil tip (terminally sampled fossil)
    for(size_t f=0; f < fossil_tip_ages.size(); f++)
    {
        double t = fossil_tip_ages[f];
        lnProbTimes += log(pZero(t, c1, c2)) - lnQ(t, c1, c2);
    }

    return lnProbTimes;
}