Example #1
0
 //------------------------------------------------------------
 double MLB::logp(const ChoiceData & dp)const{
   // for right now...  assumes all choices are available to everyone
   //    uint n = dp->n_avail();
   wsp.resize(nch_);
   fill_eta(dp, wsp);
   uint y = dp.value();
   double ans = wsp[y] - lse(wsp);
   return ans;
 }
 void MLVSS::update(const ChoiceData &dp, const Vec & wgts, const Vec &u){
   const Mat & X(dp.X(false));      // 'false' means omit columns
   xtwx_.add_inner(X, wgts, false);   // corresponding to subject X's at
   xtwu_ += X.Tmult(wgts*u);         // choice level 0.
   sym_ = false;
   for (int i = 0; i < wgts.size(); ++i) {
     weighted_sum_of_squares_ += wgts[i] * square(u[i]);
   }
 }
Example #3
0
 // Impute the latent utilities and mixture indicators for a single
 // observation, and fold the result into *suf.
 //
 // NOTE(review): this looks like the MLVS / auxiliary-mixture data
 // augmentation scheme (a la Fruhwirth-Schnatter & Fruhwirth), where the
 // latent extreme-value utilities are sampled via exponentials and the
 // residual distribution is replaced by a discrete normal mixture with
 // component means mu_[k] and precisions sigsq_inv_[k] -- confirm against
 // the model class documentation.
 //
 // Args:
 //   dp:  The observed choice data point; dp.value() is the chosen level.
 //   suf: Complete-data sufficient statistics to be updated in place.
 //   rng: Random number generator used for all draws.
 void MlvsDataImputer::impute_latent_data_point(const ChoiceData &dp,
                                                SufficientStatistics *suf,
                                                RNG &rng) {
   // Fill eta with the linear predictor for each choice level.
   model_->fill_eta(dp, eta);  // eta+= downsampling_logprob
   // Under case-control style downsampling, correct the linear
   // predictors by the log sampling probabilities.
   if (downsampling_) eta += log_sampling_probs_;  //
   uint M = model_->Nchoices();
   uint y = dp.value();
   assert(y < M);  // observed choice must be a valid level
   // loglam is the log of the total event rate, lambda = sum_m exp(eta_m).
   double loglam = lse(eta);
   // Draw the log of the minimum arrival time (presumably rlexp_mt draws
   // a log-scale exponential with the given log rate -- TODO confirm).
   double logzmin = rlexp_mt(rng, loglam);
   // The chosen level attains the maximum utility: u[y] = -log(zmin).
   u[y] = -logzmin;
   for (uint m = 0; m < M; ++m) {
     if (m != y) {
       // Non-chosen levels: draw an arrival time constrained to exceed
       // zmin, so their utilities stay below u[y].
       double tmp = rlexp_mt(rng, eta[m]);
       double logz = lse2(logzmin, tmp);
       u[m] = -logz;
     }
     // Sample the mixture component k approximating the residual
     // u[m] - eta[m], then center the utility and record the precision
     // weight for the conditionally Gaussian update.
     uint k = unmix(rng, u[m] - eta[m]);
     u[m] -= mu_[k];
     wgts[m] = sigsq_inv_[k];
   }
   // Accumulate the imputed (weights, utilities) into the suf stats.
   suf->update(dp, wgts, u);
 }