int main() {
    unsigned int num_iterations = 1000;     // number of iterations to run
    float v[2] = {0.0405f, 0.5f};

    // create gradsearch object
    gradsearch gs = gradsearch_create(NULL,v,2,gserror,LIQUID_OPTIM_MINIMIZE);

    // execute search one iteration at a time
    unsigned int i;
    float rmse;
    for (i=0; i<num_iterations; i++) {
        rmse = gserror(NULL,v,2);
        gradsearch_step(gs);

        if (((i+1)%100)==0)
            gradsearch_print(gs);
    }
    gradsearch_destroy(gs);

    // print results
    for (i=0; i<41; i++)
        printf(" z = %12.8f, g = %12.8f (%12.8f)\n", z[i], lngamma_test[i], sandbox_lngammaf(z[i], v));

    printf("rmse = %12.4e;\n", rmse);
    printf("v0 = %12.8f\n", v[0]);
    printf("v1 = %12.8f\n", v[1]);
    printf("done.\n");
    return 0;
}
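The listing above assumes that the tabulated values z[] and lngamma_test[], the candidate approximation sandbox_lngammaf(), and the utility callback gserror() are all defined earlier in the same file. The exact error metric is not shown; the sketch below is only one plausible gserror() matching the gradsearch utility-function signature (user-data pointer, parameter vector, vector length), computing the root mean-square error of the candidate against the 41 tabulated reference points. Its body is an assumption, not the original implementation, and it requires <math.h> for sqrtf().

// assumed utility callback: RMS error of sandbox_lngammaf() against the
// tabulated reference values (z[] and lngamma_test[] are globals defined
// elsewhere in the program)
float gserror(void * _userdata, float * _v, unsigned int _n)
{
    float rmse = 0.0f;
    unsigned int i;
    for (i=0; i<41; i++) {
        float e = sandbox_lngammaf(z[i], _v) - lngamma_test[i];
        rmse += e*e;
    }
    return sqrtf(rmse / 41.0f);
}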
int main() {
    unsigned int num_parameters = 8;    // search dimensionality
    unsigned int num_iterations = 100;  // number of iterations to run
    float target_utility = 0.01f;       // target utility

    float v[num_parameters];            // optimum vector
    // ... initialize v ...

    // create gradsearch object
    gradsearch gs = gradsearch_create(NULL, v, num_parameters,
                                      &myutility, LIQUID_OPTIM_MINIMIZE);

    // execute batch search
    gradsearch_execute(gs, num_iterations, target_utility);

    // clean it up
    gradsearch_destroy(gs);
    return 0;
}
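The utility myutility() is not defined in the listing; any function matching the gradsearch utility signature can be passed to gradsearch_create(). A minimal sketch follows, assuming a simple sum-of-squares bowl with its minimum at the origin; the name and body are illustrative only.

// example utility: sum of squares over the parameter vector
// (minimum of zero at v = 0); any function with this signature works
float myutility(void * _userdata, float * _v, unsigned int _n)
{
    float u = 0.0f;
    unsigned int i;
    for (i=0; i<_n; i++)
        u += _v[i] * _v[i];
    return u;
}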
int main() {
    // options
    unsigned int num_samples = 400;     // number of samples
    float sig = 0.1f;                   // noise standard deviation
    unsigned int num_iterations = 1000; // number of iterations to run

    float v[3] = {1, 1, 1};
    unsigned int i;

    // range
    float xmin = 0.0f;
    float xmax = 6.0f;
    float dx = (xmax - xmin) / (num_samples-1);

    // generate data set
    float x[num_samples];
    float y[num_samples];
    for (i=0; i<num_samples; i++) {
        x[i] = xmin + i*dx;
        y[i] = sincf(x[i]) + randnf()*sig;
    }
    struct gsdataset q = {x, y, num_samples};

    // create gradsearch object
    gradsearchprops_s gsprops;
    gradsearchprops_init_default(&gsprops);
    gsprops.delta = 1e-6f;  // gradient approximation step size
    gsprops.gamma = 0.002f; // vector step size
    gsprops.alpha = 0.1f;   // momentum parameter
    gsprops.mu    = 0.999f; // decremental gamma parameter (best if not exactly 1.0)

    gradsearch gs = gradsearch_create((void*)&q, v, 3,
                                      gserror, LIQUID_OPTIM_MINIMIZE,
                                      &gsprops);
    float rmse;

    // execute search
    //rmse = gradsearch_run(gs, num_iterations, -1e-6f);

    // open output file
    FILE * fid = fopen(OUTPUT_FILENAME,"w");
    fprintf(fid,"%% %s : auto-generated file\n", OUTPUT_FILENAME);
    fprintf(fid,"clear all;\n");
    fprintf(fid,"close all;\n");

    // execute search one iteration at a time
    fprintf(fid,"u = zeros(1,%u);\n", num_iterations);
    for (i=0; i<num_iterations; i++) {
        rmse = gserror((void*)&q,v,3);
        fprintf(fid,"u(%3u) = %12.4e;\n", i+1, rmse);

        gradsearch_step(gs);

        if (((i+1)%100)==0)
            gradsearch_print(gs);
    }

    // print results
    printf("\n");
    gradsearch_print(gs);
    printf(" c0 = %12.8f, opt = 1\n", v[0]);
    printf(" c1 = %12.8f, opt = 0\n", v[1]);
    printf(" c2 = %12.8f, opt = 1\n", v[2]);
    printf(" rmse = %12.4e\n", rmse);

    fprintf(fid,"figure;\n");
    fprintf(fid,"semilogy(u);\n");
    fprintf(fid,"xlabel('iteration');\n");
    fprintf(fid,"ylabel('error');\n");
    fprintf(fid,"title('gradient search results');\n");
    fprintf(fid,"grid on;\n");

    // save sampled data set
    for (i=0; i<num_samples; i++) {
        fprintf(fid," x(%4u) = %12.8f;\n", i+1, x[i]);
        fprintf(fid," y(%4u) = %12.8f;\n", i+1, y[i]);
        fprintf(fid," y_hat(%4u) = %12.8f;\n", i+1, gsfunc(x[i],v));
    }
    fprintf(fid,"figure;\n");
    fprintf(fid,"plot(x,y,'x', x,y_hat,'-');\n");
    fprintf(fid,"xlabel('x');\n");
    fprintf(fid,"ylabel('f(x)');\n");
    fprintf(fid,"grid on;\n");
    fprintf(fid,"legend('data','fit',1);\n");
    fclose(fid);
    printf("results written to %s.\n", OUTPUT_FILENAME);

    gradsearch_destroy(gs);
    return 0;
}
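This data-fitting listing relies on a user-defined gsdataset structure, a parameterized model gsfunc(), and the utility callback gserror(), none of which appear above. The sketch below reconstructs them under stated assumptions: the structure fields are inferred from the initializer {x, y, num_samples}, and the model form v0*sinc(v1 + v2*x) is a guess chosen only because it makes the printed optimum (c0, c1, c2) = (1, 0, 1) reproduce the noiseless data y = sinc(x); the original example may use a different parameterization. As before, sqrtf() requires <math.h>.

// observed data set passed to the utility through the user-data pointer
// (field order inferred from the initializer {x, y, num_samples})
struct gsdataset {
    float *      x;
    float *      y;
    unsigned int n;
};

// parameterized model; this form is an assumption chosen so that
// v = {1, 0, 1} reproduces sinc(x), matching the printed optimum
float gsfunc(float _x, float * _v)
{
    return _v[0] * sincf(_v[1] + _v[2]*_x);
}

// utility callback: root mean-square error of the model over the data set
float gserror(void * _userdata, float * _v, unsigned int _n)
{
    struct gsdataset * q = (struct gsdataset*) _userdata;
    float rmse = 0.0f;
    unsigned int i;
    for (i=0; i<q->n; i++) {
        float e = gsfunc(q->x[i], _v) - q->y[i];
        rmse += e*e;
    }
    return sqrtf(rmse / (float)(q->n));
}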