// Build the distribution of the -ln(Likelihood) test statistic from
// Poisson pseudo-experiments thrown around the MC expectation, draw it,
// and quote the p-value (CL) of the observed data likelihood.
//
// Parameters:
//   ntrials - number of toy experiments used to fill the distribution
//   print   - if kTRUE, write the canvas to "lldistrib.pdf"
void loglikdistrib(Int_t ntrials = 10000, Bool_t print = kFALSE)
{
  TH1D * hmc   = gStack[gPadNr][gOrder[gPadNr][0]];  // MC prediction
  TH1D * hdata = gStack[gPadNr][gMaxProcess-1];      // observed data
  Int_t nbins = hmc->GetNbinsX();
  // -ln L of the actual data w.r.t. the MC model
  Double_t loglik = loglikelihood(hmc, hdata, 1, nbins);
  TH1D * htest = new TH1D(*hdata);  // scratch histogram for the toys
  TH1D * lldistrib = new TH1D("lldistrib", "log(Likelihood) distribution",
                              1000, loglik-200, loglik+200);
  setopt(lldistrib);
  for (Int_t n = 0; n < ntrials; n++) {
    // generate poisson around theorie
    for (Int_t i = 1; i <= nbins; i++) {
      htest->SetBinContent(i, gRandom->Poisson(hmc->GetBinContent(i)));
    }
    lldistrib->Fill(loglikelihood(hmc, htest, 1, nbins));
  }
  delete htest;  // fix: toy histogram was leaked on every call
  TCanvas * llcanvas = new TCanvas("llcanvas", "Log(Likelihood) distribution",
                                   40, 40, 800, 600);
  setopt(llcanvas);
  lldistrib->SetFillColor(kYellow);
  lldistrib->Draw();
  lldistrib->GetYaxis()->SetTitle("Anzahl Ereignisse");
  lldistrib->GetXaxis()->SetTitle("-ln L");
  // autozoom: restrict the displayed range to the populated bins
  Int_t lowbin = 1;
  while (lldistrib->GetBinContent(lowbin) == 0)
    lowbin++;
  Int_t highbin = lldistrib->GetNbinsX();
  while (lldistrib->GetBinContent(highbin) == 0)
    highbin--;
  lldistrib->SetAxisRange(lldistrib->GetBinLowEdge(lowbin),
                          lldistrib->GetBinLowEdge(highbin));
  // Shade the tail with -ln L >= the observed value.  The original code
  // hard-coded bins 501..1000, which only worked because the histogram is
  // centered on loglik; derive the boundary bin from loglik instead.
  Int_t obsbin = lldistrib->FindBin(loglik);
  TH1D * hworse = (TH1D *) lldistrib->Clone();
  for (Int_t nbin = 1; nbin < obsbin; nbin++) {
    hworse->SetBinContent(nbin, 0);
  }
  hworse->SetFillColor(95);
  hworse->Draw("same");
  // fraction of toys at least as unlikely as the data
  Double_t pvalue = lldistrib->Integral(obsbin, lldistrib->GetNbinsX())
                  / lldistrib->Integral();
  TLatex * tex = new TLatex(0.18, 0.96, Form("-ln L_{obs} = %5.2f", loglik));
  tex->SetNDC();
  tex->SetTextAlign(13);
  tex->Draw();
  tex = new TLatex(0.18, 0.86, Form("CL_{obs} = %.3f", pvalue));
  tex->SetNDC();
  tex->SetTextAlign(13);
  tex->Draw();
  // vertical marker at the observed -ln L
  TLine * l = new TLine(loglik, 0, loglik, lldistrib->GetMaximum());
  l->SetLineWidth(3);
  l->SetLineColor(kBlue);
  l->Draw();
  llcanvas->Modified();
  llcanvas->Update();
  if (print)
    llcanvas->Print("lldistrib.pdf");
  cd(gPadNr+1);  // restore the previously selected pad
}
void Chain_Factorial::update_x_BlockGibbs(){ if(nrows_gibbs == K){ // forward step FHMM_forward_step(pi, A, emission_probs, P_FHMM, loglik_marginal, k_restricted, n, x, restricted_space); // now backward sampling FHMM_backward_sampling(x, P_FHMM, k_restricted, n, restricted_space); } else{ for(int i=0; i<all_combinations.ncol(); i++){ // Restrict the state space to block Gibbs updates IntegerVector which_rows_fixed = all_combinations(_, i); //IntegerVector which_rows_fixed = sample_helper(K, K-nrows_gibbs); restricted_space = construct_all_restricted_space(k_restricted, which_rows_fixed, mapping); // forward step FHMM_forward_step(pi, A, emission_probs, P_FHMM, loglik_marginal, k_restricted, n, x, restricted_space); // now backward sampling FHMM_backward_sampling(x, P_FHMM, k_restricted, n, restricted_space); } } // conditional loglikelihood loglik_cond = loglikelihood(x, emission_probs, n); convert_x_to_X(); }
// One round of parallel-tempered ensemble sampling with a separated
// prior/likelihood ("_pl"): optionally performs replica-exchange swaps
// between walkers of adjacent temperatures, then advances every
// temperature's ensemble with an affine-invariant stretch move.
//
// Parameters:
//   logprior/loglikelihood - callables evaluated on a single walker state
//   ensemble_list          - one walker ensemble per temperature (not modified;
//                            the update acts on a clone)
//   rng                    - random number generator forwarded to the helpers
//   beta_list              - inverse temperatures, one per ensemble
//   perform_swap           - whether to attempt replica exchange this round
//   nthread_allowed        - thread budget passed through to ensemble_sample
//   a                      - stretch-move scale parameter (default 2)
// Returns the updated ensemble list.
// Throws mcmc_exception if beta_list contains two equal adjacent entries.
//
// NOTE(review): clone/get_size/get_element/set_element/exchange_prob/
// ensemble_sample/urng are project helpers not visible here — the comments
// on their call sites describe apparent intent; confirm against their docs.
T_ensemble_list ptsample_pl(T_prior&& logprior, T_likelihood&& loglikelihood, const T_ensemble_list& ensemble_list, T_rng&& rng, const T_beta_list& beta_list, bool perform_swap, size_t nthread_allowed=1, typename std::result_of<T_prior(typename element_type_trait<typename element_type_trait<T_ensemble_list>::element_type>::element_type)>::type a=2)
{
  // T: scalar type produced by the prior; T_var: a single walker's state.
  using T=typename std::result_of<T_prior(typename element_type_trait<typename element_type_trait<T_ensemble_list>::element_type>::element_type)>::type;
  using T_var=typename element_type_trait<typename element_type_trait<T_ensemble_list>::element_type>::element_type;
  auto new_ensemble_list=clone(ensemble_list);            // work on a copy
  size_t ntemp=get_size(ensemble_list);                   // number of temperatures
  size_t nwalker=get_size(get_element(ensemble_list,0));  // walkers per ensemble
  if(perform_swap)
    {
      /*
      for(size_t i=0;i<ntemp;++i)
        {
          shuffle(get_element(new_ensemble_list,i),rng);
        }
      */
      // Replica exchange between each pair of adjacent temperatures.
      for(size_t i=0;i<ntemp-1;++i)
        {
          T beta1=beta_list[i];
          T beta2=beta_list[i+1];
          if(beta1==beta2)
            {
              // equal betas would make the exchange ill-defined
              mcmc_exception e("beta list should not contain duplicated elements");
              throw e;
            }
          for(size_t j=0;j<nwalker;++j)
            {
              auto var1=as<T_var>(get_element(get_element(new_ensemble_list,i),j));
              auto var2=as<T_var>(get_element(get_element(new_ensemble_list,i+1),j));
              // Acceptance probability for swapping walker j between the two
              // temperatures, based on the full log-posterior logprior+loglik.
              T ep=exchange_prob([&logprior,&loglikelihood](const T_var& x){return logprior(x)+loglikelihood(x);},var1,var2,beta1,beta2);
              if(urng<T>(rng)<ep)
                {
                  // accepted: exchange walker j of ensembles i and i+1
                  auto temp=clone<T_var>(get_element(get_element(new_ensemble_list,i),j));
                  set_element(get_element(new_ensemble_list,i),j,
                              get_element(get_element(new_ensemble_list,i+1),j));
                  set_element(get_element(new_ensemble_list,i+1),j,temp);
                }
            }
        }
    }
  // Within-temperature move: sample each ensemble from the tempered target
  // logprior(x) + beta * loglikelihood(x).
  for(size_t i=0;i<ntemp;++i)
    {
      T beta=get_element(beta_list,i);
      set_element(new_ensemble_list,i,ensemble_sample([&logprior,&loglikelihood,beta](const T_var& x){
            T lp=logprior(x);
            T ll=loglikelihood(x);
            // An infinite prior (or likelihood) short-circuits: return the
            // (possibly -inf) prior so the point is rejected upstream.
            if(std::isinf(lp)||std::isinf(ll))
              {
                return lp;
              }
            return lp+ll*beta;
          },get_element(new_ensemble_list,i),rng,nthread_allowed,a));
    }
  return new_ensemble_list;
}
/*
 * Metropolis MCMC fit of a 4-parameter model (A, B, C, D) to (x, y) data
 * read from the file named in argv[1].  Each chain step is printed to
 * stdout as "A B C D loglike".
 */
int main(int argc, char **argv){
    int n_points = 0;
    double *x;
    double *y;
    double *y_model;   /* model prediction, recomputed for each proposal */
    int n_steps = 10000;
    double *A;
    double *B;
    double *C;
    double *D;
    double *loglike;
    double alpha;
    double r;
    int i;

    /* guard against a missing input-file argument (argv[1] was used unchecked) */
    if(argc < 2){
        fprintf(stderr, "Usage: %s datafile\n", argv[0]);
        return 1;
    }

    n_points = count_lines(argv[1]);
    /*fprintf(stdout, "File %s has %d lines\n", argv[1], n_points);*/
    x = init_array(n_points);
    y = init_array(n_points);
    y_model = init_array(n_points);
    load_data(argv[1], x, y, n_points);

    /* initialize chains */
    A = init_array(n_steps);
    B = init_array(n_steps);
    C = init_array(n_steps);
    D = init_array(n_steps);
    loglike = init_array(n_steps);

    /* first step */
    A[0] = 10.0;
    B[0] = 10.0;
    C[0] = 10.0;
    D[0] = 10.0;
    model(A[0], B[0], C[0], D[0], x, y_model, n_points);
    loglike[0] = loglikelihood(y, y_model, n_points);

    for(i = 1; i < n_steps; i++){
        /* propose a new point around the previous one */
        A[i] = new_value(A[i-1], 0.1);
        B[i] = new_value(B[i-1], 0.1);
        C[i] = new_value(C[i-1], 0.1);
        D[i] = new_value(D[i-1], 0.1);
        model(A[i], B[i], C[i], D[i], x, y_model, n_points);
        loglike[i] = loglikelihood(y, y_model, n_points);

        /* Metropolis acceptance probability */
        r = MIN(1.0, exp(loglike[i] - loglike[i-1]));
        alpha = drand48();
        if(alpha >= r){
            /* reject: revert the parameters AND the log-likelihood.
             * BUG FIX: the original kept the rejected proposal's loglike[i],
             * which corrupted the acceptance ratio of the next step. */
            A[i] = A[i-1];
            B[i] = B[i-1];
            C[i] = C[i-1];
            D[i] = D[i-1];
            loglike[i] = loglike[i-1];
        }
        /* on acceptance the proposed values are already in place */
        fprintf(stdout, "%f %f %f %f %f\n", A[i], B[i], C[i], D[i], loglike[i]);
    }
    return 0;
}