//------------------------------------------------------------ double MLB::logp(const ChoiceData & dp)const{ // for right now... assumes all choices are available to everyone // uint n = dp->n_avail(); wsp.resize(nch_); fill_eta(dp, wsp); uint y = dp.value(); double ans = wsp[y] - lse(wsp); return ans; }
double DPMM::logp(const Vector &x) const { int number_of_components = mixture_components_.size(); double ans = 0; if (number_of_components == 1) { ans += mixture_components_[0]->logp(x); return ans; } Vector counts = allocation_counts(); // The Dirichlet process is the limit of finite mixture models // with symmetric Dirichlet priors (with total mass alpha) on the // mixing weights. Vector probs(number_of_components, alpha() / number_of_components); probs += counts; probs /= sum(probs); // Posterior mode of mixing weights. Vector log_probs = log(probs); Vector wsp = log_probs; for (int i = 0; i < number_of_components; ++i) { wsp[i] += mixture_components_[i]->logp(x); } ans += lse(wsp); return ans; }
// Imputes the latent utilities for a single choice observation and
// folds the imputed values into *suf.  Uses workspace members
// (eta, u, wgts) owned by the imputer, so this method is not
// thread safe with respect to a single MlvsDataImputer object.
// NOTE(review): this looks like the MLVS auxiliary-mixture scheme
// (normal mixture approximation to the extreme-value residual) --
// confirm against the sampler documentation.
void MlvsDataImputer::impute_latent_data_point(const ChoiceData &dp,
                                               SufficientStatistics *suf,
                                               RNG &rng) {
  // Fill 'eta' with the linear predictor for each choice.
  model_->fill_eta(dp, eta);
  // eta += downsampling_logprob: adjust the predictors when the
  // data were downsampled.
  if (downsampling_) eta += log_sampling_probs_;
  uint M = model_->Nchoices();
  uint y = dp.value();  // index of the observed choice
  assert(y < M);
  // loglam = log sum_m exp(eta[m]): log of the total event rate.
  double loglam = lse(eta);
  // Draw the log of the minimum arrival time; its negation is the
  // (maximal) latent utility attached to the observed choice.
  double logzmin = rlexp_mt(rng, loglam);
  u[y] = -logzmin;
  for (uint m = 0; m < M; ++m) {
    if (m != y) {
      // Draw the latent utility for a non-chosen category.
      // lse2(logzmin, tmp) >= logzmin, so u[m] = -logz <= u[y]:
      // no competitor can beat the observed choice.
      double tmp = rlexp_mt(rng, eta[m]);
      double logz = lse2(logzmin, tmp);
      u[m] = -logz;
    }
    // Sample a mixture-component indicator for the residual
    // u[m] - eta[m], then center by that component's mean and
    // record its precision as the observation weight.
    uint k = unmix(rng, u[m] - eta[m]);
    u[m] -= mu_[k];
    wgts[m] = sigsq_inv_[k];
  }
  // Accumulate the (now conditionally Gaussian) imputed data.
  suf->update(dp, wgts, u);
}
// Fills 'ans' with the vector of choice probabilities for the
// observation *dp and returns a reference to it.
Vec &MLB::predict(Ptr<ChoiceData> dp, Vec &ans) const {
  fill_eta(*dp, ans);
  // Softmax computed on the log scale (subtract the log normalizing
  // constant before exponentiating) for numerical stability.
  double lognc = lse(ans);
  ans = exp(ans - lognc);
  return ans;
}
// Probability that the response takes level r, given parameters
// Theta.  Returns the log probability when logsc is true.
double PCR::response_prob(uint r, const Vector &Theta, bool logsc) const {
  // Populate the eta_ workspace with the linear predictors.
  fill_eta(Theta);
  // Softmax on the log scale: eta_[r] minus the log normalizer.
  const double log_prob = eta_[r] - lse(eta_);
  return logsc ? log_prob : exp(log_prob);
}