예제 #1
0
void BasicSkewGenerator::UpdateSkewGenerator(ShortStack &total_theory) {
  // Clear all latent accumulators before re-estimating skew.
  ResetUpdate();

  // Fold the cross-update contribution of every valid read into the accumulators.
  for (unsigned int idx = 0; idx < total_theory.valid_indexes.size(); idx++) {
    AddCrossUpdate(total_theory.my_hypotheses[total_theory.valid_indexes[idx]]);
  }

  // Recompute the latent skew predictors from the accumulated statistics.
  DoLatentUpdate();
}
예제 #2
0
void BasicSigmaGenerator::SimplePrior(){
   // Seed the latent sigma model with a weak prior; the data is expected
   // to overwhelm these pseudo-observations during real updates.
   ZeroAccumulator();
   const float weight = prior_weight;  // strength of the prior pseudo-observations
   for (unsigned int level = 0; level < accumulated_sigma.size(); level++) {
      // Squared level plus one: strictly positive, so no zero division below.
      const float level_sq = level * level + 1.0f;
      // Regression gives sigma roughly quadratic in intensity level
      // (empirically steeper than the linear growth one might expect);
      // square it to push a variance into the accumulator.
      float var = prior_sigma_regression[0] + prior_sigma_regression[1] * level_sq;
      var *= var;
      PushLatent(weight, (float) level, var, true);
   }
   DoLatentUpdate();  // trivial intensity->sigma model is now in place
}
예제 #3
0
// important: residuals do not need to be reset before this operation (predictions have been corrected for bias already)
void BasicSigmaGenerator::UpdateSigmaGenerator(ShortStack &total_theory) {
  // Start from a clean slate for the latent accumulators.
  ResetUpdate();

  const unsigned int num_valid = total_theory.valid_indexes.size();

  // Pass 1: accumulate the per-read cross-updates, establishing basic weights.
  for (unsigned int n = 0; n < num_valid; n++) {
    AddCrossUpdate(total_theory.my_hypotheses[total_theory.valid_indexes[n]]);
  }

  // Pass 2: with basic weights established, fold in the additional
  // variability from cluster shifting (Bayesian multidimensional normal).
  // NOTE(review): k_zero is presumably a class member — a commented-out
  // local of the same name existed here historically; verify in the header.
  for (unsigned int n = 0; n < num_valid; n++) {
    AddShiftCrossUpdate(total_theory.my_hypotheses[total_theory.valid_indexes[n]], k_zero);
  }

  DoLatentUpdate();  // derive the new latent predictors for sigma
}