Example #1
bool BVH4BuilderFast::splitSequential(BuildRecord& current, BuildRecord& leftChild, BuildRecord& rightChild)
 {
   /* mark as leaf if leaf threshold reached */
   if (current.items() <= QBVH_BUILDER_LEAF_ITEM_THRESHOLD) {
     current.createLeaf();
     return false;
   }
   
   /* calculate binning function */
   Mapping<16> mapping(current.bounds);
   
   /* binning of centroids */
   Binner<16> binner;
   binner.bin(prims,current.begin,current.end,mapping);
   
   /* find best split */
   Split split; 
   binner.best(split,mapping);
   
   /* if we cannot find a valid split, enforce an arbitrary split */
   if (unlikely(split.pos == -1)) split_fallback(prims,current,leftChild,rightChild);
   
   /* partitioning of items */
   else binner.partition(prims, current.begin, current.end, split, mapping, leftChild, rightChild);
   
   if (leftChild.items()  <= QBVH_BUILDER_LEAF_ITEM_THRESHOLD) leftChild.createLeaf();
   if (rightChild.items() <= QBVH_BUILDER_LEAF_ITEM_THRESHOLD) rightChild.createLeaf();	
   return true;
 }
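The boolean return value separates "node was split" (true) from "node became a leaf" (false). A minimal sketch of the recursive driver such a routine implies, assuming a hypothetical buildRecursive helper rather than Embree's actual work-stack scheduler:

 // Hypothetical driver (illustration only): the real builder dispatches
 // BuildRecords through work lists and parallel tasks, and allocates the
 // BVH4 node linking the two children, which is omitted here.
 void BVH4BuilderFast::buildRecursive(BuildRecord& current)
 {
   BuildRecord leftChild, rightChild;
   if (!splitSequential(current,leftChild,rightChild))
     return;                    // current was turned into a leaf
   buildRecursive(leftChild);   // small children are already flagged as
   buildRecursive(rightChild);  // leaves and stop on their next visit
 }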
Example #2
#include <vector>

// Count how many items fall into each bin; items whose bin index is
// negative (invalid) are skipped.
std::vector<int> count(const Binner& binner) {
  std::vector<int> out;

  const int n = binner.size();
  for (int i = 0; i < n; ++i) {
    const int bin = binner.bin(i);
    if (bin < 0) continue;

    // Grow the output on demand so every bin index is addressable
    // (the cast avoids a signed/unsigned comparison)
    if (bin >= static_cast<int>(out.size())) {
      out.resize(bin + 1);
    }
    ++out[bin];
  }

  return out;
}
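A quick usage sketch. The Binner stub below is a hypothetical stand-in exposing just the size()/bin(i) interface that count() relies on, and would need to be declared ahead of it:

#include <cstddef>
#include <cstdio>
#include <vector>

// Hypothetical stand-in for the Binner interface used by count():
// size() is the number of items, bin(i) the bin index of item i
// (or -1 for items that fall outside every bin).
struct Binner {
  std::vector<int> bins;
  int size() const { return static_cast<int>(bins.size()); }
  int bin(int i) const { return bins[i]; }
};

int main() {
  Binner binner{{0, 2, 2, -1, 5}};             // item 3 has no valid bin
  std::vector<int> histogram = count(binner);  // -> {1, 0, 2, 0, 0, 1}
  for (std::size_t b = 0; b < histogram.size(); ++b)
    std::printf("bin %zu: %d\n", b, histogram[b]);
  return 0;
}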
Example #3
std::vector<double> CanonicalAverager::calculate_weights(const std::vector<double> &energies, const Binner &binner, const DArray &lnG, const BArray &lnG_support, double beta) {
    // Assert that the arrays have the same shape and are one-dimensional
    assert(lnG.same_shape(lnG_support));
    assert(lnG.get_shape().size()==1);

    unsigned int nbins = lnG.get_shape(0);

    // First, build a histogram of the energies passed as an argument
    UArray histogram(nbins);

    for (std::vector<double>::const_iterator it = energies.begin(); it != energies.end(); ++it) {
        int bin = binner.calc_bin(*it);

        if (0 <= bin && bin < static_cast<int>(nbins)) {
            histogram(bin)++;
        }
    }

    // Calculate the support for this histogram
    BArray histogram_support = histogram > 0;

    // Calculate the normalization constant for the canonical distribution
    // by summing over the region where both the histogram and the estimated
    // entropy (lnG) have support. The normalization constant is given by
    //
    // lnZ_beta = log(Z_beta) = log[ sum_E exp( S(E) - beta*E ) ]
    //
    // To calculate this we use the log-sum-exp trick
    BArray support = histogram_support && lnG_support;
    DArray binning = binner.get_binning_centered();

    DArray summands(nbins);

    for (BArray::constwheretrueiterator it = support.get_constwheretrueiterator(); it(); ++it) {
        summands(it) = lnG(it) - beta*binning(it);
    }

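    // log_sum_exp evaluates log(sum_i exp(x_i)) over the supported bins in a
    // numerically stable way: m + log(sum_i exp(x_i - m)), with m = max_i x_i.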
    double lnZ_beta = log_sum_exp(summands, support);

    // Now calculate the probability for each bin according to the canonical ensemble
    DArray P_beta(nbins);

    for (BArray::constwheretrueiterator it = support.get_constwheretrueiterator(); it(); ++it) {
        P_beta(it) = exp(-beta*binning(it) + lnG(it) - lnZ_beta);
    }

    // Calculate the weight for each energy in the energy vector
    std::vector<double> weights;

    for (std::vector<double>::const_iterator it = energies.begin(); it != energies.end(); ++it) {
        int bin = binner.calc_bin(*it);

        if (0 <= bin && bin < static_cast<int>(nbins)) {
            double weight = 1.0/static_cast<double>(histogram(bin)) * P_beta(bin);
            weights.push_back(weight);
        }
        else {
            weights.push_back(0);
        }
    }

    return weights;
}
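The returned weights line up one-to-one with the input energies, so the canonical expectation value of any observable sampled alongside them is a weighted mean. A minimal sketch; the helper below is illustrative, not part of Muninn's API:

#include <cassert>
#include <cstddef>
#include <vector>

// Illustrative helper (hypothetical): canonical average of an observable
// using the per-sample weights from calculate_weights. The weights are
// defined only up to a constant, so normalize by their sum.
double canonical_average(const std::vector<double>& observable,
                         const std::vector<double>& weights) {
    assert(observable.size() == weights.size());
    double numerator = 0.0, normalization = 0.0;
    for (std::size_t i = 0; i < observable.size(); ++i) {
        numerator     += weights[i] * observable[i];
        normalization += weights[i];
    }
    return numerator / normalization;
}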