// Ruby binding: Error.erf_Z(x) -> Float
// Converts the Ruby numeric argument to a C double, evaluates GSL's
// standard-normal density Z(x) = exp(-x^2/2)/sqrt(2*pi), and boxes the
// result back into a Ruby Float. `self` is required by the Ruby C API
// calling convention but is unused here.
static VALUE Error_erf_Z(VALUE self, VALUE x) {
  const double arg = NUM2DBL(x);
  const double density = gsl_sf_erf_Z(arg);
  return rb_float_new(density);
}
// Standard-normal probability density at x: Z(x) = exp(-x^2/2)/sqrt(2*pi).
// Thin wrapper over GSL so callers don't depend on the GSL name directly.
// (Fixed: removed the stray semicolon after the function body, which was an
// empty declaration.)
double gaussPdf(double x) { return gsl_sf_erf_Z(x); }
/**
 * Compute the EM objective Q (expected log-likelihood plus priors and
 * regularization penalties) for the current parameter estimates in `data`.
 *
 * Q accumulates, in order:
 *   1. the expectation of the log-priors over all task/leaf pairs,
 *   2. the expected log-probability of every observed label under the
 *      noise model selected by data->mode (1: GLAD task-dependent,
 *      2: GLAD class-dependent, 3: GLAD task-and-class-dependent,
 *      4: Rasch task-dependent),
 *   3. quadratic regularization penalties on alpha and beta
 *      (no-ops when lambdaAlpha/lambdaBeta are 0, the default),
 *   4. optional standard-normal log-priors on alpha and beta.
 *
 * Aborts the process on an invalid mode, on allocation failure, or as soon
 * as Q becomes NaN (which would otherwise silently poison the EM run).
 *
 * @param data  model state and observed labels (read-only here)
 * @return      the objective value Q
 */
double compute_objective_function(Dataset *data) {
  int i, j, k, idx, lij, rij;
  double p;
  double Q = 0;
  double *alpha = data->alpha, *beta = data->beta;

  // Scratch buffers: per-step counts for the current label, and the beta
  // index vector used by the class-dependent modes (2 and 3).
  // Fixed: the original did not check the malloc results.
  int *count = (int *) malloc(sizeof(int) * (data->num_steps + 1));
  int *beta_idx = (int *) malloc(sizeof(int) * data->num_steps);
  if (count == NULL || beta_idx == NULL) {
    std::cerr << "Out of memory in compute_objective_function" << std::endl;
    abort();
  }
  beta_idx[0] = 0;

  // Start with the expectation of the sum of priors over all images.
  for (j = 0; j < data->num_tasks; j++) {
    for (k = 0; k < data->num_leaves; k++) {
      idx = get_z_index(j, k, data);
      if (data->priorZ[idx] == 0) continue; // Skip ignored Z
      Q += data->probZ[idx] * log(data->priorZ[idx]);
    }
  }

  for (k = 0; k < data->num_leaves; k++) { // True class
    // Class-dependent beta: the index vector depends only on the class k,
    // so it can be filled once per class, outside the label loop.
    // NOTE(review): assumes last_step[k] <= num_steps so beta_idx[h+1]
    // stays in bounds — confirm against Dataset construction.
    if (data->mode == 2) {
      for (int h = 0; h < data->last_step[k] - 1; h++)
        beta_idx[h+1] = get_beta_index(h, get_step(k, h, data), data);
    }
    for (idx = 0; idx < data->num_labels; idx++) {
      i = data->labels[idx].labelerId;
      j = data->labels[idx].imageIdx;
      lij = data->labels[idx].label;
      // Task-and-class dependent beta: also depends on the task j, so the
      // index vector must be refilled for every label.
      if (data->mode == 3) {
        for (int h = 0; h < data->last_step[k] - 1; h++)
          beta_idx[h+1] = get_beta_index(j, h, get_step(k, h, data), data);
      }
      // Find rij
      get_num_diff_steps(k, count, j, data);
      rij = get_rij(lij, k, data);
      // Compute logP(l) under the selected noise model.
      switch (data->mode) {
        case 1: // Steps GLAD (task-dependent)
          p = calc_log_ProbL_GLAD_t(lij, rij, data->last_step[k], data->alpha[i], data->beta[j], count[rij]);
          break;
        case 2: // Steps GLAD (class dependent)
        case 3: // Steps GLAD (task-and-class dependent)
          p = calc_log_ProbL_GLAD_ctc(rij, data->last_step[k], data->alpha[i], data->beta, beta_idx, count[rij]);
          break;
        case 4: // Steps Rasch model (task dependent)
          // This is presented as another example of extension.
          p = calc_log_ProbL_rasch_t(lij, rij, data->last_step[k], data->alpha[i], data->beta[j], count[rij]);
          break;
        default:
          std::cerr << "Invalid mode " << data->mode << std::endl;
          abort();
      }
      // Weight the label's log-probability by the posterior of class k.
      Q += p * data->probZ[get_z_index(j, k, data)];
    }
    if (isnan(Q)) {
      std::cerr << "isnan(Q) is True after computing Q from labels: Q = " << Q << std::endl;
      abort();
    }
  }

  // Regularization penalty (default: lambda = 0)
  for (i = 0; i < data->num_labelers; i++) {
    Q -= data->lambdaAlpha * (data->alpha[i] - data->priorAlpha[i]) * (data->alpha[i] - data->priorAlpha[i]);
  }
  for (idx = 0; idx < data->num_beta; idx++) {
    Q -= data->lambdaBeta * (data->beta[idx] - data->priorBeta[idx]) * (data->beta[idx] - data->priorBeta[idx]);
  }

  // Add Gaussian (standard normal) prior for alpha
  if (!data->ignore_priorAlpha) {
    for (i = 0; i < data->num_labelers; i++) {
      Q += log(gsl_sf_erf_Z(alpha[i] - data->priorAlpha[i]));
      if (isnan(Q)) {
        std::cerr << "isnan(Q) is True after adding Gaussian prior for alpha" << std::endl;
        abort();
      }
    }
  }
  // Add Gaussian (standard normal) prior for beta
  if (!data->ignore_priorBeta) {
    for (idx = 0; idx < data->num_beta; idx++) {
      Q += log(gsl_sf_erf_Z(beta[idx] - data->priorBeta[idx]));
      if (isnan(Q)) {
        // Fixed the garbled word order of the original message
        // ("is after True adding").
        std::cerr << "isnan(Q) is True after adding Gaussian prior for beta" << std::endl;
        abort();
      }
    }
  }

  if (data->debug) { std::cerr << "Q = " << Q << std::endl; }
  free(count);
  free(beta_idx);
  return Q;
}