/*
   * Reduce (add) distributions from multiple MPI processors.
   */
   void Distribution::reduce(MPI::Intracomm& communicator, int root)
   {
  
      long* totHistogram = new long[nBin_]; 
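      // Sum histogram bins element-wise across all processors; the summed
      // result lands in totHistogram on the root processor only.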
      communicator.Reduce(histogram_.cArray(), totHistogram, nBin_, MPI::LONG, MPI::SUM, root);
      if (communicator.Get_rank() == root) {
         for (int i=0; i < nBin_; ++i) {
            histogram_[i] = totHistogram[i];
         }
      } else { 
         for (int i=0; i < nBin_; ++i) {
            histogram_[i] = 0;
         }
      }
      delete [] totHistogram;

      long totSample; 
      communicator.Reduce(&nSample_, &totSample, 1, MPI::LONG, MPI::SUM, root);
      if (communicator.Get_rank() == root) {
         nSample_ = totSample;
      } else {
         nSample_ = 0;
      }

      long totReject; 
      communicator.Reduce(&nReject_, &totReject, 1, MPI::LONG, MPI::SUM, root);
      if (communicator.Get_rank() == root) {
         nReject_ = totReject;
      } else {
         nReject_ = 0;
      }

   }
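For context, here is a minimal self-contained sketch of the same reduce-to-root pattern, written against the plain C MPI interface rather than the deprecated C++ bindings used above. The buffer names and sizes are illustrative only and are not part of the Distribution class:

#include <mpi.h>
#include <cstdio>
#include <vector>

int main(int argc, char** argv)
{
   MPI_Init(&argc, &argv);
   int rank;
   MPI_Comm_rank(MPI_COMM_WORLD, &rank);

   const int nBin = 4;
   std::vector<long> local(nBin, rank + 1);   // per-processor "histogram"
   std::vector<long> total(nBin, 0);          // summed result, valid on root only

   // Element-wise sum of every rank's local array into total on rank 0.
   MPI_Reduce(local.data(), total.data(), nBin, MPI_LONG, MPI_SUM,
              0, MPI_COMM_WORLD);

   if (rank == 0) {
      std::printf("bin 0 summed over all ranks = %ld\n", total[0]);
   }
   MPI_Finalize();
   return 0;
}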
Example #2
   /*
   * Compute, store and return total number of atoms on all processors.
   */
   void AtomStorage::computeNAtomTotal(MPI::Intracomm& communicator)
   {
      // The early return when nAtomTotal_ is already set is disabled below,
      // so the total is recomputed on every call.
      // if (nAtomTotal_.isSet()) return;

      int nAtomLocal = nAtom();
      int nAtomTotal = 0;
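      // Sum the per-processor atom counts; the result is significant
      // only on the root processor (rank 0).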
      communicator.Reduce(&nAtomLocal, &nAtomTotal, 1, 
                          MPI::INT, MPI::SUM, 0);
      if (communicator.Get_rank() != 0) {
         nAtomTotal = 0;
      }
      nAtomTotal_.set(nAtomTotal);
   }
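As a variant, if every processor needed the total (rather than only the root, as above), MPI_Allreduce would replace Reduce. Below is a minimal sketch, independent of the AtomStorage class; the local count is a stand-in for nAtom():

#include <mpi.h>
#include <cstdio>

int main(int argc, char** argv)
{
   MPI_Init(&argc, &argv);
   int rank;
   MPI_Comm_rank(MPI_COMM_WORLD, &rank);

   int nAtomLocal = 10 + rank;   // stand-in for the per-processor atom count
   int nAtomTotal = 0;

   // Every rank receives the sum, unlike MPI_Reduce where only the root does.
   MPI_Allreduce(&nAtomLocal, &nAtomTotal, 1, MPI_INT, MPI_SUM, MPI_COMM_WORLD);

   std::printf("rank %d sees nAtomTotal = %d\n", rank, nAtomTotal);
   MPI_Finalize();
   return 0;
}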
Example #3
// Metropolis-Hastings population size resampling; not used anymore
void resample_popsizes_mh(ArgModel *model, const LocalTrees *trees,
                          bool sample_popsize_recomb, double heat) {
    list<PopsizeConfigParam> &l = model->popsize_config.params;
    double curr_like = sample_popsize_recomb ? calc_arg_prior(model, trees) :
        calc_arg_prior_recomb_integrate(model, trees, NULL, NULL, NULL);
#ifdef ARGWEAVER_MPI
    MPI::Intracomm *comm = model->mc3.group_comm;
    int rank = comm->Get_rank();
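    // Sum the per-processor likelihood terms onto rank 0; the root passes
    // MPI_IN_PLACE so its own curr_like is combined in place.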
    comm->Reduce(rank == 0 ? MPI_IN_PLACE : &curr_like,
                 &curr_like, 1, MPI::DOUBLE, MPI_SUM, 0);
#endif
    for (int rep=0; rep < model->popsize_config.numsample; rep++) {
        int idx=0;
        for (list<PopsizeConfigParam>::iterator it = l.begin();
             it != l.end(); it++) {
            curr_like =
                resample_single_popsize_mh(model, trees, sample_popsize_recomb,
                                           heat, it, curr_like, idx++);
        }
    }

}
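Finally, a minimal sketch of the MPI_IN_PLACE idiom used in the Reduce call above, written against the C MPI interface; the variable names are illustrative and not taken from ARGweaver:

#include <mpi.h>
#include <cstdio>

int main(int argc, char** argv)
{
   MPI_Init(&argc, &argv);
   int rank;
   MPI_Comm_rank(MPI_COMM_WORLD, &rank);

   double like = 1.0 + rank;   // stand-in for a per-processor likelihood term

   if (rank == 0) {
      // The root passes MPI_IN_PLACE as the send buffer, so its own value
      // is combined into the sum without a separate receive buffer.
      MPI_Reduce(MPI_IN_PLACE, &like, 1, MPI_DOUBLE, MPI_SUM, 0, MPI_COMM_WORLD);
      std::printf("summed value on root: %g\n", like);
   } else {
      // The receive buffer is ignored on non-root ranks.
      MPI_Reduce(&like, NULL, 1, MPI_DOUBLE, MPI_SUM, 0, MPI_COMM_WORLD);
   }

   MPI_Finalize();
   return 0;
}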