// Do a hard classification as though the reads were independent,
// i.e. make the read assignments look more like the raw data in the BAM file.
void PosteriorInference::StartAtHardClassify(ShortStack &total_theory, bool update_frequency, float start_frequency) {
  // make sure internal state is sized to match total_theory
  ResizeToMatch(total_theory);
  if (update_frequency) {
    // seed the search at the supplied frequency and record its log-likelihood
    max_freq = start_frequency;
    max_ll = total_theory.PosteriorFrequencyLogLikelihood(max_freq, data_reliability, ALL_STRAND_KEY);
  }
  total_theory.UpdateResponsibility(max_freq, data_reliability); // assign responsibilities at the starting frequency
}
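
// A minimal usage sketch (illustration only, not called from this file):
// seeding inference at a hard classification. `posterior`, `my_theory`,
// and the 0.5f starting allele frequency are hypothetical values.
//
//   PosteriorInference posterior;
//   posterior.StartAtHardClassify(my_theory, true, 0.5f);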
void PosteriorInference::DetailedUpdateStep(ShortStack &total_theory, bool update_frequency) {
  DoPosteriorFrequencyScan(total_theory, update_frequency, ALL_STRAND_KEY); // update max frequency using new likelihoods -> estimate overall max likelihood
  total_theory.UpdateResponsibility(max_freq, data_reliability); // update cluster responsibilities
}
void PosteriorInference::QuickUpdateStep(ShortStack &total_theory) {
  UpdateMaxFreqFromResponsibility(total_theory, ALL_STRAND_KEY); // derive max frequency from the current responsibilities instead of a full scan
  total_theory.UpdateResponsibility(max_freq, data_reliability); // update cluster responsibilities
}
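
// A hedged sketch of how a caller might alternate the two step flavors
// (illustration only; the loop bound `kMaxIter` and the variables are
// hypothetical, not part of this class):
//
//   posterior.DetailedUpdateStep(my_theory, true); // expensive: full frequency scan
//   for (int i = 0; i < kMaxIter; i++)
//     posterior.QuickUpdateStep(my_theory);        // cheap: reuse current responsibilities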
// initialize the ensemble: do a full posterior frequency scan, then set responsibilities
void PosteriorInference::StartAtNull(ShortStack &total_theory, bool update_frequency) {
  DoPosteriorFrequencyScan(total_theory, update_frequency, ALL_STRAND_KEY);
  total_theory.UpdateResponsibility(max_freq, data_reliability);
}
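
// Illustration only: StartAtHardClassify and StartAtNull are alternative
// initializations feeding the same update steps; a hypothetical caller
// might choose between them like this:
//
//   if (start_from_bam_like_state)
//     posterior.StartAtHardClassify(my_theory, true, 0.5f);
//   else
//     posterior.StartAtNull(my_theory, true);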