/*
 * Per-document training callback: sample a topic assignment for every
 * word of one document. Invoked by the corpus iterator; reads and
 * updates the module-level _static_nTM / _static_progressbar.
 */
void nTM_each_document(WordIndex *words, unsigned int length) {
    nTM *model = _static_nTM;

    // One more document has been seen.
    model->documents++;

    // Fresh SparseCount for N^d_z (count of topics in this document).
    count_list_add(model->ndzs);

    for (unsigned int w = 0; w < length; w++) {
        // Draw z ~ P(z|w) for this word.
        // NOTE(review): sampling passes documents-1 while the assignment
        // below passes documents — confirm the index conventions of
        // nTM_sample_topic vs nTM_assign_topic are intentionally different.
        TopicIndex z = nTM_sample_topic(model, words[w], model->documents - 1);

        // A draw equal to the current topic count means "open a new topic".
        if (z == model->topics) {
            nTM_create_topic(model);
        }

        nTM_assign_topic(model, words[w], z, model->documents);
    }

    progressbar_inc(_static_progressbar);

    // Checkpoint: every `interval` documents, persist assignments and
    // start a fresh progress bar for the next batch.
    if (model->documents % model->interval == 0) {
        progressbar_finish(_static_progressbar);
        nTM_save_assignments(model);
        _static_progressbar = progressbar_new("Training", model->interval);
    }
}
/** *Example for statusbar and progressbar usage **/ int main(void) { // Status bar statusbar *status = statusbar_new("Indeterminate"); for (int i=0; i<30; i++) { usleep(SLEEP_MS); statusbar_inc(status); } statusbar_finish(status); status = statusbar_new("Status bar with a really long label"); for (int i=0; i<10; i++) { usleep(SLEEP_MS); statusbar_inc(status); } statusbar_finish(status); status = statusbar_new_with_format("Custom","(|)|"); for (int i=0; i<30; i++) { usleep(SLEEP_MS); statusbar_inc(status); } statusbar_finish(status); // Progress bar int max = 240; progressbar *progress = progressbar_new("Smooth",max); for(int i=0;i<max;i++) { usleep(SLEEP_MS); progressbar_inc(progress); } progressbar_finish(progress); progress = progressbar_new("Three Second Task with a long label",3); for(int i=0;i<3;i++) { progressbar_inc(progress); sleep(1); } progressbar_finish(progress); progress = progressbar_new("Fast",100); for(int i=0;i<100;i++) { usleep(SLEEP_MS); progressbar_inc(progress); } progressbar_finish(progress); }
/*
 * Train the topic model over an entire line corpus.
 * Installs `model` into the module-level slot so the per-document
 * callback can reach it, then clears the slot when iteration is done.
 */
void nTM_train(nTM *model) {
    _static_nTM = model;
    _static_progressbar = progressbar_new("Training", model->interval);

    // NOTE(review): the per-document callback also calls count_list_add;
    // confirm this initial add is intentional and not a duplicate entry
    // in model->ndzs.
    count_list_add(model->ndzs);

    line_corpus_each_document(model->corpus, &nTM_each_document);

    progressbar_finish(_static_progressbar);
    _static_nTM = NULL;
}
/*
 * Train a CW model: build word representations from the corpus in one
 * pass, then run a fixed number of refinement iterations, and finally
 * persist the target wordmap.
 */
void CW_train(CW *model) {
    // Seed the RNG used during representation building / iteration.
    srand(time(NULL));

    static_cw_model = model;
    static_progress = progressbar_new("Building Representations",
                                      model->corpus->document_count);
    target_corpus_each_document(model->corpus, &CW_each_document);
    progressbar_finish(static_progress);

    // Fixed refinement budget.
    const int iteration_count = 50;
    progressbar *iter_bar = progressbar_new("Iterating", iteration_count);
    for (int iter = 0; iter < iteration_count; iter++) {
        CW_iteration(model);
        progressbar_inc(iter_bar);
    }
    progressbar_finish(iter_bar);

    static_cw_model = NULL;
    CW_save_target_wordmap(model);
}
/**
 * Run a full nested-sampling integration.
 *
 * Repeatedly replaces the worst live point, shrinking the remaining prior
 * volume by exp(-1/nlive_points) per iteration, and accumulates the log
 * evidence (logZ) and information (H) until one of three stopping rules
 * fires: sample cap reached, total error below tolerance, or the remainder
 * error is negligible relative to the systematic error.
 *
 * NOTE(review): the first parameter is a bare type `LikelihoodFunc` with no
 * name, yet the body uses `Like` — confirm the declaration should read
 * `LikelihoodFunc Like` (possibly lost in extraction).
 *
 * @param root         output path prefix handed to write_results()
 * @param ndim         dimensionality of the parameter space
 * @param max_samples  stop after this many likelihood draws (<= 0 disables)
 * @param logZtol      evidence tolerance for the stopping rule
 * @param nlive_points number of live points
 * @param nsteps       steps per draw, forwarded to ultranest_draw_init()
 * @return             results struct; res.weighted_points is heap-allocated
 *                     and ownership passes to the caller.
 */
ultranest_results ultranest(LikelihoodFunc, const char * root, const int ndim,
        const int max_samples, const double logZtol, const int nlive_points,
        unsigned int nsteps) {
    unsigned int i = 0;                 // iterations (dead points) so far
    double tolerance = logZtol;
    int pbar_maxval = nlive_points;     // moving estimate of total iterations
    char pbar_label[200];
    pbar_label[0] = 0;
    ultranest_draw_state * drawer = ultranest_draw_init(Like, ndim, nsteps, 1.);
#ifdef ENABLE_PROGRESSBAR
    progressbar * pbar = progressbar_new("initialising...", pbar_maxval);
    pbar->format[1] = '=';
#endif
    ultranest_state * sampler = ultranest_sampler_init(Like, root, ndim,
        nlive_points, drawer);
#ifdef ENABLE_PROGRESSBAR
    progressbar_update_label(pbar, "sampling...");
    progressbar_update(pbar, i);
#endif
    // First point to be retired from the live set.
    point * current = ultranest_sampler_next(sampler);

    /* begin integration */
    double logVolremaining = 0;         // log of the remaining prior volume
    double logwidth = log(1 - exp(-1. / sampler->nlive_points));
    weighted_point * weights = NULL;    // grows by realloc; returned in res
    ultranest_results res;
    res.root = root;

    // Seed the evidence accumulator with the first dead point's weight.
    double wi = logwidth + current->L;
    double logZ = wi;
    double H = current->L - logZ;       // information (KL divergence)
    double logZerr;
    while(1) {
        // Shell width for this iteration, then shrink the remaining volume.
        logwidth = log(1 - exp(-1. / sampler->nlive_points)) + logVolremaining;
        logVolremaining -= 1. / sampler->nlive_points;

        // NOTE(review): realloc result overwrites `weights` directly — on
        // allocation failure the original block leaks and the next line
        // dereferences NULL. Also no NULL check here.
        weights = (weighted_point *) realloc(weights,
            (i+sampler->nlive_points+1) * sizeof(weighted_point));
        weights[i].p = current;
        weights[i].weight = logwidth;
        i = i + 1;

        // Statistical uncertainty of logZ from the information content.
        logZerr = sqrt(H / sampler->nlive_points);

        // double i_final = -sampler->nlive_points * (-sampler->Lmax + logsubexp(fmax(tolerance - logZerr, logZerr / 100.) + logZ, logZ));
        // i_final = -sampler.nlive_points * (-sampler.Lmax + log(exp(max(tolerance - logZerr, logZerr / 100.) + logZ) - exp(logZ)))

        // The error margin driving the iteration estimate must be positive,
        // otherwise the projected i_final below is meaningless.
        assert(fmax(tolerance - logZerr, logZerr / 100.) > 0);
        assert(fmax(tolerance - logZerr, logZerr / 100.)
            + logZ > logZ);

        // Projected total number of iterations needed to reach tolerance;
        // clamped into [i+1, i+100000] for the progress display.
        int i_final = -(sampler->nlive_points * (-sampler->Lmax
            + logsubexp(logZ, fmax(tolerance - logZerr, logZerr / 100.))));
        pbar_maxval = (int) fmin(fmax(i+1, i_final), i+100000);

        // Estimate the contribution of the remaining live points
        // (NULL: no weights recorded, just updates remainderZ/remainderZerr).
        ultranest_sampler_integrate_remainder(sampler, logwidth,
            logVolremaining, logZ, NULL);

        // Status line is built unconditionally: used by the bar when
        // ENABLE_PROGRESSBAR is set, printed directly otherwise.
        progressbar_settext(pbar_label, i, pbar_maxval, sampler->nlive_points,
            sampler->ndraws, logZ, sampler->remainderZ, logZerr,
            sampler->remainderZerr, current, sampler->ndim);
#ifdef ENABLE_PROGRESSBAR
        progressbar_update_label(pbar, pbar_label);
        pbar->max = pbar_maxval;
        progressbar_update(pbar, i);
#else
        printf("%s\n", pbar_label);
#endif
        // Stopping rules are only evaluated after at least one full
        // generation of live points has been retired.
        if (i > sampler->nlive_points) {
            // tolerance
            double total_error = logZerr + sampler->remainderZerr;
            if (max_samples > 0 && (unsigned) max_samples < sampler->ndraws) {
#ifdef ENABLE_PROGRESSBAR
                progressbar_finish(pbar);
#endif
                printf("maximum number of samples reached\n");
                break;
            }
            if (total_error < tolerance) {
#ifdef ENABLE_PROGRESSBAR
                progressbar_finish(pbar);
#endif
                printf("tolerance reached\n");
                break;
            }
            // we want to make maxContribution as small as possible
            // but if it becomes 10% of logZerr, that is enough
            if (sampler->remainderZerr < logZerr / 10.) {
#ifdef ENABLE_PROGRESSBAR
                progressbar_finish(pbar);
#endif
                printf("tolerance will not improve: remainder error (%.3f) is much smaller than systematic errors (%.3f)\n",
                    sampler->remainderZerr, logZerr);
                break;
            }
        }

        // Retire the next-worst live point and fold it into logZ and H
        // using the standard nested-sampling update.
        current = ultranest_sampler_next(sampler);
        wi = logwidth + current->L;
        double logZnew = logaddexp(logZ, wi);
        H = exp(wi - logZnew) * current->L
            + exp(logZ - logZnew) * (H + logZ) - logZnew;
        logZ = logZnew;
    }
    // not needed for integral, but for posterior samples, otherwise there
    // is a hole in the most likely parameter ranges.
    i += ultranest_sampler_integrate_remainder(sampler, logwidth,
        logVolremaining, logZ, weights + i);
    logZerr += sampler->remainderZerr;
    logZ = logaddexp(logZ, sampler->remainderZ);

    // Package the results; weights ownership transfers to the caller.
    res.logZ = logZ;
    res.logZerr = logZerr;
    res.ndraws = sampler->ndraws;
    res.niter = i;
    res.H = H;
    res.weighted_points = weights;
    printf("ULTRANEST result: lnZ = %.2f +- %.2f\n", res.logZ, res.logZerr);
    write_results(root, res, ndim);
    // NOTE(review): `drawer` and `sampler` are not released here — confirm
    // whether their cleanup is the caller's responsibility.
    return res;
}