Example 1
int main(int argc, char** argv)
{
    cc_config_t config;
    dataset_t* data;
    clique_list_t* cl;
    matrix_t* matrix;
    group_list_t* groups;
#ifdef WITH_LIMITS
    /* optionally cap CPU time at 900 s and the address space at 400 MiB */
    struct rlimit cpu_limit = { 900, 900 };
    struct rlimit mem_limit = { 419430400, 419430400 };

    setrlimit(RLIMIT_CPU, &cpu_limit);
    setrlimit(RLIMIT_AS, &mem_limit);
#endif
    configure(&config, argc, argv);
    /* load the dataset and the clique list named in the configuration */
    data = dataset_load(config.datfile);
    cl = clique_list_load(config.clfile);
    printf("Generate matrix...\n");
    /* connection matrix with int cells, filled in by boolean_connection */
    matrix = matrix_create(data, cl, sizeof(int), NULL, boolean_connection);
    /* matrix_print_int(matrix); */
    printf("Generate group list...\n");
    groups = group_list_from_clique_list(cl);
    /* group_list_print(groups); */
    printf("Merging groups...\n");
    group_list_merge(groups, boolean_strength, NULL, matrix);
    group_list_save(groups, config.grpfile);
    group_list_destroy(groups);
    matrix_destroy(matrix);
    clique_list_destroy(cl);
    dataset_destroy(&data);
    return 0;
}
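The WITH_LIMITS block above uses the standard POSIX resource-limit API. For reference, here is the same pattern as a self-contained program with the return values checked; the 900 s / 400 MiB figures are the ones from the example, while the error handling and the MiB arithmetic are additions, not part of the original:

#include <stdio.h>
#include <sys/resource.h>

int main(void)
{
    /* soft and hard limits: 900 seconds of CPU, 400 MiB of address space */
    struct rlimit cpu_limit = { 900, 900 };
    struct rlimit mem_limit = { 400UL * 1024 * 1024, 400UL * 1024 * 1024 };

    if(setrlimit(RLIMIT_CPU, &cpu_limit) != 0)
        perror("setrlimit(RLIMIT_CPU)");
    if(setrlimit(RLIMIT_AS, &mem_limit) != 0)
        perror("setrlimit(RLIMIT_AS)");

    /* past these limits the process gets SIGXCPU, and allocations fail */
    return 0;
}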
Example 2
int main(int argc, char** argv)
{
    map_config_t config;
    dataset_t* dataset;
    map_t* map;

    configure(&config, argc, argv);

    if(config.ot == OT_CREATE)
    {
        dataset = dataset_load(config.infile);
        if(dataset)
        {
            map = map_create(dataset, &config.level);
            if(config.print)
                map_print(map);
            map_save(config.outfile, map);
            map_destroy(map);
            /* release the dataset only if it was actually loaded */
            dataset_destroy(&dataset);
        }
    }
    else if(config.ot == OT_READ)
    {
        map = map_load(config.infile);
        if(map)
        {
            if(config.print)
                map_print(map);
            map_destroy(map);
        }
    }
    return 0;
}
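For context, the shape of map_config_t can be inferred from how the code above uses it. The following reconstruction is a guess from the call sites, not the actual header; level_t in particular is a placeholder for whatever map_create() really takes:

typedef int level_t;   /* placeholder type, assumption */

typedef enum { OT_CREATE, OT_READ } op_t;

typedef struct
{
    op_t    ot;        /* requested operation, set by configure() */
    char*   infile;    /* dataset (OT_CREATE) or saved map (OT_READ) */
    char*   outfile;   /* where map_save() writes the new map */
    int     print;     /* nonzero: also dump the map to stdout */
    level_t level;     /* resolution, passed by address to map_create() */
} map_config_t;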
Example 3
int main(int argc, char** argv)
{
    wc_config_t config;
    dataset_t* data;
    clique_list_t* cl;
    matrix_t* matrix;
    group_list_t* groups;
    wc_data_t wc_data;
#ifdef WITH_LIMITS
    /* optionally cap CPU time at 900 s and the address space at 400 MiB */
    struct rlimit cpu_limit = { 900, 900 };
    struct rlimit mem_limit = { 419430400, 419430400 };

    setrlimit(RLIMIT_CPU, &cpu_limit);
    setrlimit(RLIMIT_AS, &mem_limit);
#endif
    configure(&config, argc, argv);
    data = dataset_load(config.datfile);
    cl = clique_list_load(config.clfile);
    /* extra inputs consumed by the wc_* scoring callbacks */
    wc_data.emap = enumerated_map_load(config.empfile);
    wc_data.fi = frequent_itemset_list_load(config.fifile, 0);
    printf("Generate matrix...\n");
    wc_data.threshold = config.threshold;
    /* double-valued cells this time, scored by alignment_amount */
    matrix = matrix_create(data, cl, sizeof(double), &wc_data, alignment_amount);
    /* matrix_print_int(matrix); */
    printf("Generate group list...\n");
    groups = group_list_from_clique_list(cl);
    /* group_list_print(groups); */
    printf("Merging groups...\n");
    wc_data.matrix = matrix;
    group_list_merge(groups, wc_strength, NULL, &wc_data);
    group_list_save(groups, config.grpfile);
    group_list_destroy(groups);
    matrix_destroy(matrix);
    enumerated_map_destroy(wc_data.emap);
    frequent_itemset_list_destroy(wc_data.fi);
    clique_list_destroy(cl);
    dataset_destroy(&data);
    return 0;
} 
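The wc_data_t aggregate threads several resources through the matrix and merge callbacks. Its definition is not shown; the field accesses above imply a layout like the following (an inference from usage, with member types taken from the loader names, not the real header):

typedef struct
{
    enumerated_map_t*        emap;      /* from enumerated_map_load() */
    frequent_itemset_list_t* fi;        /* from frequent_itemset_list_load() */
    double                   threshold; /* copied from config.threshold */
    matrix_t*                matrix;    /* set just before merging groups */
} wc_data_t;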
Example 4
int main(int argc, char** argv)
{
    /* configuration for the visualization program */
    visualize_config_t config;
    /* reference to the loaded dataset */
    dataset_t* dataset;
    /* optional group-list overlay; stays NULL unless one is requested */
    group_list_t* groups = NULL;

    configure(&config, argc, argv);

    /* load dataset from input file */
    dataset = dataset_load(config.input);
    if(dataset)
    {
        /* if a group list is defined, load that one too */
        if(config.have_groups)
            groups = group_list_load(config.grpfile);

        /* if the dataset is supposed to be printed to stdout in text format,
         * then do so */
        if(config.print)
            dataset_print(dataset);

        /* now draw the dataset */
        dataset_draw(&config, dataset, groups);

        /* release group list from memory if there is one */
        if(groups)
            group_list_destroy(groups);

        /* release dataset from memory */
        dataset_destroy(&dataset);

    }

    return 0;
}
Example 5
int main(int argc, char **argv)
{
  /* These variables are for command-line options.
   */
  double mag = 1.0, etol = 10e-3, detol = 10e-8, rate = 0.1;
  int seed = 0, minepochs = 10, maxepochs = 100;
  char *afunc = "tanh", *alg = "cgpr", *srch = "cubic";

  /* The OPTION array is used to easily parse command-line options.
   */
  OPTION opts[] = {
    { "-seed",      OPT_INT,    &seed,      "random number seed"           },
    { "-minepochs", OPT_INT,    &minepochs, "minimum # of training steps"  },
    { "-maxepochs", OPT_INT,    &maxepochs, "maximum # of training steps"  },
    { "-afunc",     OPT_STRING, &afunc,     "act. function for hidden node"},
    { "-mag",       OPT_DOUBLE, &mag,       "max size of initial weights"  },
    { "-etol",      OPT_DOUBLE, &etol,      "error tolerance"              },
    { "-detol",     OPT_DOUBLE, &detol,     "delta error tolerance"        },
    { "-rate",      OPT_DOUBLE, &rate,      "learning rate"                },
    { "-alg",       OPT_STRING, &alg,       "training algorithm"           },
    { "-srch",      OPT_STRING, &srch,      "line search"                  },
    { NULL,         OPT_NULL,   NULL,       NULL                           }
  };

  /* The DATASET and the NN that we will use.
   */
  DATASET *data;
  NN *nn;


  /* Get the command-line options.
   */
  get_options(argc, argv, opts, help_string, NULL, 0);

  /* Set the random seed.
   */
  srandom(seed);

  nn = nn_create("4 2 4");   /* 2-2-1 architecture. */
  nn_link(nn, "0 -l-> 1");   /* Inputs to hidden link. */
  nn_link(nn, "1 -l-> 2");   /* Hidden to output link. */

  /* Set the activation functions of the hidden and output layers and
   * initialize the weights to uniform random values between -/+mag.
   */
  nn_set_actfunc(nn, 1, 0, afunc);
  nn_set_actfunc(nn, 2, 0, "logistic");
  nn_init(nn, mag);
 
  /* Convert the C matrix into a DATASET.  There are four inputs, four
   * outputs, and four patterns total.
   */
  data = dataset_create(&dsm_matrix_method,
			dsm_c_matrix(&rawdata[0][0], 4, 4, 4));

  /* Tell the NN how to train itself.
   */
  nn->info.train_set = data;
  nn->info.opt.min_epochs = minepochs;
  nn->info.opt.max_epochs = maxepochs;
  nn->info.opt.error_tol = etol;
  nn->info.opt.delta_error_tol = detol;
  nn->info.opt.hook = training_hook;
  nn->info.opt.rate = rate;

  if(strcmp(srch, "hybrid") == 0)
    nn->info.opt.stepf = opt_lnsrch_hybrid;
  else if(strcmp(srch, "golden") == 0)
    nn->info.opt.stepf = opt_lnsrch_golden;
  else if(strcmp(srch, "cubic") == 0)
    nn->info.opt.stepf = opt_lnsrch_cubic;
  else if(strcmp(srch, "none") == 0)
    nn->info.opt.stepf = NULL;
  
  if(strcmp(alg, "cgpr") == 0)
    nn->info.opt.engine = opt_conjgrad_pr;
  else if(strcmp(alg, "cgfr") == 0)
    nn->info.opt.engine = opt_conjgrad_fr;
  else if(strcmp(alg, "qndfp") == 0)
    nn->info.opt.engine = opt_quasinewton_dfp;
  else if(strcmp(alg, "qnbfgs") == 0)
    nn->info.opt.engine = opt_quasinewton_bfgs;
  else if(strcmp(alg, "lm") == 0)
    nn->info.opt.engine = opt_levenberg_marquardt;
  else if(strcmp(alg, "bp") == 0) {
    nn->info.opt.engine = opt_gradient_descent;
    nn->info.opt.stepf = NULL;
    nn->info.subsample = 1;
    nn->info.opt.stepf = nn_lnsrch_search_then_converge;
    nn->info.opt.momentum = 0.9;
    nn->info.stc_eta_0 = 1;
    nn->info.stc_tau = 100;
  }

  /* Do the training.  This will print out the epoch number and
   * the error level until training halts via one of the stopping
   * criteria.
   */
  nn_train(nn);

  /* Print out each input training pattern and the respective
   * NN output.
   */
  printf("--------------------\n");
  nn_offline_test(nn, data, testing_hook);

#if 1
  { 
    const double dw = 0.000001;
    double jj1, jj2, *Rg, Rin[4], Rdout[4], dedy[4], err;
    int j, k, l, n = nn->numweights;
    Rg = allocate_array(1, sizeof(double), nn->numweights);
    nn->need_all_grads = 1;
    for(k = 0; k < 4; k++) {
      
      nn_forward(nn, &rawdata[k][0]);
      for(l = 0; l < nn->numout; l++)
	dedy[l] = nn->y[l] - rawdata[k][l];
      nn_backward(nn, dedy);
      for(l = 0; l < nn->numout; l++)
	/* seed the R-forward pass with the residual dx - dedy */
	Rin[l] =  nn->dx[l] - dedy[l];

      nn_Rforward(nn, Rin, NULL);
      for(l = 0; l < nn->numout; l++)
        /* difference of the R-propagated quantities Ry and Rx at the outputs */
        Rdout[l] = nn->Ry[l] - nn->Rx[l];

      nn_Rbackward(nn, Rdout);
      nn_get_Rgrads(nn, Rg);

      for(j = 0; j < n; j++) {
	nn_forward(nn, &rawdata[k][0]);
	for(l = 0; l < nn->numout; l++)
	  dedy[l] = nn->y[l] - rawdata[k][l];
	nn_backward(nn, dedy);
	jj1 = 0;
	for(l = 0; l < nn->numout; l++)
	  jj1 += 0.5 * (dedy[l] - nn->dx[l]) * (dedy[l] - nn->dx[l]);

	*nn->weights[j] += dw;
	nn_forward(nn, &rawdata[k][0]);
	for(l = 0; l < nn->numout; l++)
	  dedy[l] = nn->y[l] - rawdata[k][l];
	nn_backward(nn, dedy);
	jj2 = 0;
	for(l = 0; l < nn->numout; l++)
	  jj2 += 0.5 * (dedy[l] - nn->dx[l]) * (dedy[l] - nn->dx[l]);
	err = fabs(Rg[j] - (jj2 - jj1) / dw) / fabs(Rg[j]);
	printf("(%d, %2d) ja = % .5e  jn = % .5e  error = % .2e  %s\n",
	       k, j, Rg[j], (jj2 - jj1) / dw,
	       err, (err > 10e-4) ? "BAD" : "GOOD");
	*nn->weights[j] -= dw;
      }
    }
  }
#endif

  /* Free up everything.
   */
  nn_destroy(nn);
  dsm_destroy_matrix(dataset_destroy(data));
  nn_shutdown();

  /* Bye.
   */
  exit(0); 
}
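The #if 1 block above verifies each analytic value Rg[j] with a one-sided finite difference: nudge one weight by dw, recompute the half-squared error, and compare the slope (jj2 - jj1) / dw against Rg[j], flagging relative errors above 10e-4. Here is the same idea isolated into a generic helper (a sketch; eval_loss and its context pointer are stand-ins, not library API):

#include <math.h>
#include <stdio.h>

/* One-sided finite-difference check of an analytic derivative.
 * eval_loss recomputes the scalar loss under the current weights. */
static double fd_check_weight(double *weight, double analytic,
                              double (*eval_loss)(void *ctx), void *ctx)
{
  const double dw = 0.000001;      /* same step as the example above */
  double j1, j2, numeric, err;

  j1 = eval_loss(ctx);             /* loss at w */
  *weight += dw;                   /* perturb a single weight */
  j2 = eval_loss(ctx);             /* loss at w + dw */
  *weight -= dw;                   /* restore the weight */

  numeric = (j2 - j1) / dw;        /* numeric slope */
  err = fabs(analytic - numeric) / fabs(analytic);
  printf("ja = % .5e  jn = % .5e  error = % .2e  %s\n",
         analytic, numeric, err, (err > 10e-4) ? "BAD" : "GOOD");
  return err;
}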
Example 6
int main(int argc, char **argv)
{
  /* These variables are for command-line options.
   */
  double var = 0.0;
  int seed = 0, nbasis = 4, norm = 0;
  
  /* The OPTION array is used to easily parse command-line options.
   */
  OPTION opts[] = {
    { "-var",    OPT_DOUBLE, &var,    "variance of basis functions"  },
    { "-seed",   OPT_INT,    &seed,   "random number seed"           },
    { "-nbasis", OPT_INT,    &nbasis, "number of basis functions"    },
    { "-norm",   OPT_SWITCH, &norm,   "normalized basis functions?"  },
    { NULL,      OPT_NULL,   NULL,    NULL                           }
  };

  /* The SERIES, DATASETs, and the NN that we will use.
   */
  SERIES *trainser, *testser;
  DATASET *trainds, *testds;
  NN *nn;


  /* Get the command-line options.
   */
  get_options(argc, argv, opts, help_string, NULL, 0);

  /* Set the random seed.
   */
  srandom(seed);

  testser =  series_read_ascii("hp41.dat");
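  /* Carve the series into (input, target) windows: x_width = 2 input
   * values and y_width = 1 target value per pattern; step = 3 presumably
   * advances one full window at a time, so the patterns do not overlap. */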
  testser->x_width = 2;
  testser->y_width = testser->offset = testser->x_delta = testser->y_delta = 1;
  testser->step = testser->x_width + testser->y_width;
  testds = dataset_create(&dsm_series_method, testser);

  trainser =  series_read_ascii("hp21.dat");
  trainser->x_width = 2;
  trainser->y_width = trainser->offset = trainser->x_delta = trainser->y_delta = 1;
  trainser->step = trainser->x_width + trainser->y_width;
  trainds = dataset_create(&dsm_series_method, trainser);

  nn_rbf_basis_normalized = norm;
  nn = nn_create_rbf(nbasis, var, trainds);

  nn->links[0]->need_grads = 1;
  nn->info.train_set = trainds;
  nn->info.opt.min_epochs = 20;
  nn->info.opt.max_epochs = 200;
  nn->info.opt.error_tol = 1e-3;
  nn->info.opt.delta_error_tol = 1e-8;
  nn->info.opt.hook = training_hook;
  nn->info.opt.stepf = opt_lnsrch_cubic;
  nn->info.opt.engine = opt_quasinewton_bfgs;
  nn_train(nn);

  /* Now, let's see how well the RBF performs.
   */
  nn_offline_test(nn, testds, testing_hook);

  /* Free up everything.
   */
  nn_destroy(nn);
  series_destroy(dataset_destroy(testds));
  series_destroy(dataset_destroy(trainds));
  nn_shutdown();

  /* Bye.
   */
  exit(0); 
}
Example 7
int main(int argc, char **argv)
{
  /* These variables are for command-line options.
   */
  double mag = 0.1, etol = 10e-3, detol = 10e-8;
  int seed = 0, minepochs = 10, maxepochs = 100;
  char *afunc = "tanh";

  /* The OPTION array is used to easily parse command-line options.
   */
  OPTION opts[] = {
    { "-seed",      OPT_INT,    &seed,      "random number seed"           },
    { "-minepochs", OPT_INT,    &minepochs, "minimum # of training steps"  },
    { "-maxepochs", OPT_INT,    &maxepochs, "maximum # of training steps"  },
    { "-afunc",     OPT_STRING, &afunc,     "act. function for hidden node"},
    { "-mag",       OPT_DOUBLE, &mag,       "max size of initial weights"  },
    { "-etol",      OPT_DOUBLE, &etol,      "error tolerance"              },
    { "-detol",     OPT_DOUBLE, &detol,     "delta error tolerance"        },
    { NULL,         OPT_NULL,   NULL,       NULL                           }
  };

  /* The DATASET and the NN that we will use.
   */
  DATASET *data;
  NN *nn;

  /* Set it so that xalloc_report() will print to the screen.
   */
  ulog_threshold = ULOG_DEBUG;
  
  /* Get the command-line options.
   */
  get_options(argc, argv, opts, "Train a NN on XOR data.\n");

  /* Set the random seed.
   */
  srandom(seed);

  /* Create the neural network.  This one has two inputs, one hidden node,
   * and a single output.  The inputs are connected to the hidden node
   * and the output, while the hidden node is connected only to the
   * output.
   */
  nn = nn_create("2 1 1");   /* 2-1-1 architecture. */
  nn_link(nn, "0 -l-> 1");   /* Inputs to hidden link. */
  nn_link(nn, "1 -l-> 2");   /* Hidden to output link. */
  nn_link(nn, "0 -l-> 2");   /* Input to output short-circuit link. */  

  /* Set the activation functions of the hidden and output layers and
   * initialize the weights to uniform random values between -/+mag.
   */
  nn_set_actfunc(nn, 1, 0, afunc);
  nn_set_actfunc(nn, 2, 0, "logistic");
  nn_init(nn, mag);
 
  /* Convert the C matrix into a DATASET.  There are two inputs, one
   * output, and four patterns total.
   */
  data = dataset_create(&dsm_matrix_method,
			dsm_c_matrix(&xor_data[0][0], 2, 1, 4));

  /* Tell the NN how to train itself.
   */
  nn->info.train_set = data;
  nn->info.opt.min_epochs = minepochs;
  nn->info.opt.max_epochs = maxepochs;
  nn->info.opt.error_tol = etol;
  nn->info.opt.delta_error_tol = detol;

  nn_train(nn);
  nn_offline_test(nn, data, NULL);

  nn_write(nn, "xor.net");
  nn_destroy(nn);
  nn = nn_read("xor.net");
  nn_destroy(nn);
  unlink("xor.net");

  dsm_destroy_matrix(dataset_destroy(data));
  nn_shutdown();

  xalloc_report();

  /* Bye.
   */
  exit(0); 
}
Example 8
int main(int argc, char **argv)
{
  /* These variables are for command-line options. */
  double noise = 0.0;
  int seed = 0, nbasis = 4, points = 100;

  /* The OPTION array is used to easily parse command-line options. */
  OPTION opts[] = {
    { "-noise",  OPT_DOUBLE, &noise,  "variance of Gaussian noise"   },
    { "-seed",   OPT_INT,    &seed,   "random number seed"           },
    { "-nbasis", OPT_INT,    &nbasis, "number of basis functions"    },
    { "-points", OPT_INT,    &points, "number of data points"        },
    { NULL,      OPT_NULL,   NULL,    NULL                           }
  };

  /* The DATASET and the NN that we will use. */
  DATASET *data;
  NN *nn;

  /* Get the command-line options.  */
  get_options(argc, argv, opts, help_string, NULL, 0);
  srandom(seed);

  /* Make the data, and build a CNLS net. */
  data = make_data(points, noise);
  nn = nn_create("2 (%d %d) %d 1", nbasis, nbasis, nbasis);
  nn_set_actfunc(nn, 1, 0, "linear");
  nn_set_actfunc(nn, 1, 1, "exp(-x)");
  nn_set_actfunc(nn, 2, 0, "linear");
  nn_set_actfunc(nn, 3, 0, "linear");

  nn_link(nn, "0 -l-> (1 0)");
  nn_link(nn, "0 -e-> (1 1)");
  nn_link(nn, "(1 1) -l-> 3");
  nn_link(nn, "(1 0) (1 1) -p-> 2");
  nn_link(nn, "2 -l-> 3");

  nn_init(nn, 1);

  nn->info.train_set = data;
  nn->info.opt.min_epochs = 10;
  nn->info.opt.max_epochs = 100;
  nn->info.opt.error_tol = 1e-5;
  nn->info.opt.delta_error_tol = 1e-7;
  nn->info.opt.hook = training_hook;
  nn_train(nn);

  /* Now, let's see how well the NN performs.
   */
  nn_offline_test(nn, data, testing_hook);

  /* Free up everything.
   */
  nn_destroy(nn);
  series_destroy(dataset_destroy(data));
  nn_shutdown();

  /* Bye.
   */
  exit(0); 
}
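Note that nn_create() is evidently printf-style, since the call above passes %d arguments. With the default -nbasis 4 it receives the expanded specification:

/* layer 0: 2 inputs; layer 1: two sublayers of 4 nodes each (linear
 * coefficients and exp(-x) basis functions); layer 2: 4 combined nodes;
 * layer 3: 1 output */
nn = nn_create("2 (4 4) 4 1");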
Example 9
int main(int argc, char **argv)
{
  /* These variables are for command-line options.
   */
  double mag = 1.0, etol = 10e-3, detol = 10e-8;
  double rate = 0.1, moment = 0.9, subsamp = 0, decay = 0.9;
  int seed = 0, minepochs = 10, maxepochs = 100;
  char *afunc = "tanh";
  void *linealg = opt_lnsrch_golden, *optalg = opt_conjgrad_pr;

  OPTION_SET_MEMBER optsetm[] = {
    { "cgpr",   opt_conjgrad_pr },
    { "cgfr",   opt_conjgrad_fr },
    { "qndfp",  opt_quasinewton_dfp },
    { "qnbfgs", opt_quasinewton_bfgs },
    { "lm",     opt_levenberg_marquardt },
    { "gd",     opt_gradient_descent },
    { NULL,     NULL }
  };

  OPTION_SET_MEMBER linesetm[] = {
    { "golden", opt_lnsrch_golden },
    { "hybrid", opt_lnsrch_hybrid },
    { "cubic",  opt_lnsrch_cubic },
    { "stc",    nn_lnsrch_search_then_converge },
    { "none",   NULL },
    { NULL,     NULL }
  };

  OPTION_SET lineset = { &linealg, linesetm };
  OPTION_SET optset = { &optalg, optsetm };
    
  /* The OPTION array is used to easily parse command-line options.
   */
  OPTION opts[] = {
    { "-seed",      OPT_INT,    &seed,      "random number seed"           },
    { "-minepochs", OPT_INT,    &minepochs, "minimum # of training steps"  },
    { "-maxepochs", OPT_INT,    &maxepochs, "maximum # of training steps"  },
    { "-afunc",     OPT_STRING, &afunc,     "act. function for hidden node"},
    { "-mag",       OPT_DOUBLE, &mag,       "max size of initial weights"  },
    { "-etol",      OPT_DOUBLE, &etol,      "error tolerance"              },
    { "-detol",     OPT_DOUBLE, &detol,     "delta error tolerance"        },
    { "-rate",      OPT_DOUBLE, &rate,      "learning rate"                },
    { "-moment",    OPT_DOUBLE, &moment,    "momentum rate"                },
    { "-alg",       OPT_SET,    &optset,    "training algorithm"           },
    { "-subsamp",   OPT_DOUBLE, &subsamp,   "subsample value"  },
    { "-decay",     OPT_DOUBLE, &decay,     "stochastic decay"  },
    { "-srch",      OPT_SET,    &lineset,   "line search" },
    { NULL,         OPT_NULL,   NULL,       NULL                           }
  };

  /* The DATASET and the NN that we will use.
   */
  DATASET *data;
  NN *nn;

  /* Get the command-line options.
   */
  get_options(argc, argv, opts, help_string, NULL, 0);

  /* Set the random seed.
   */
  srandom(seed);

  /* Create the neural network.  This one has two inputs, one hidden node,
   * and a single output.  The inputs are connected to the hidden node
   * and the output, while the hidden node is connected only to the
   * output.
   */
  nn = nn_create("2 1 1");   /* 2-1-1 architecture. */
  nn_link(nn, "0 -l-> 1");   /* Inputs to hidden link. */
  nn_link(nn, "1 -l-> 2");   /* Hidden to output link. */
  nn_link(nn, "0 -l-> 2");   /* Input to output short-circuit link. */  

  /* Set the activation functions of the hidden and output layers and
   * initialize the weights to uniform random values between -/+mag.
   */
  nn_set_actfunc(nn, 1, 0, afunc);
  nn_set_actfunc(nn, 2, 0, "logistic");
  nn_init(nn, mag);
 
  /* Convert the C matrix into a DATASET.  There are two inputs, one
   * output, and four patterns total.
   */
  data = dataset_create(&dsm_matrix_method,
			dsm_c_matrix(&xor_data[0][0], 2, 1, 4));

  /* Tell the NN how to train itself.
   */
  nn->info.train_set = data;
  nn->info.opt.min_epochs = minepochs;
  nn->info.opt.max_epochs = maxepochs;
  nn->info.opt.error_tol = etol;
  nn->info.opt.delta_error_tol = detol;
  nn->info.opt.hook = training_hook;
  nn->info.opt.rate = rate;
  nn->info.opt.momentum = moment;
  nn->info.opt.decay = decay;
  nn->info.subsample = subsamp;
  if(subsamp != 0)
    nn->info.opt.stochastic = 1;
  nn->info.opt.stepf = linealg;
  nn->info.opt.engine = optalg;
  nn->info.stc_eta_0 = 1;
  nn->info.stc_tau = 100;


  /* Do the training.  This will print out the epoch number and
   * the error level until training halts via one of the stopping
   * criteria.
   */
  nn_train(nn);
  nn->info.subsample = 0;

  /* Print out each input training pattern and the respective
   * NN output.
   */
  printf("--------------------\n");
  nn_offline_test(nn, data, testing_hook);

  /* Free up everything.
   */
  nn_destroy(nn);
  dsm_destroy_matrix(dataset_destroy(data));
  nn_shutdown();

  /* Bye.
   */
  exit(0); 
}
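Unlike Example 5, which dispatched on the -alg and -srch strings with a chain of strcmp() calls, the OPT_SET entries let get_options() resolve the names itself. Here is a sketch of the presumable mechanism, reconstructed from how the types are initialized above; this is an illustration, not the library source:

#include <string.h>

/* Shapes inferred from the initializers in Example 9. */
typedef struct { char *name; void *value; } OPTION_SET_MEMBER;
typedef struct { void **dest; OPTION_SET_MEMBER *members; } OPTION_SET;

/* Hypothetical resolver: what get_options() plausibly does for OPT_SET. */
static int option_set_resolve(OPTION_SET *set, const char *arg)
{
  int i;

  for(i = 0; set->members[i].name != NULL; i++)
    if(strcmp(set->members[i].name, arg) == 0) {
      *set->dest = set->members[i].value;   /* e.g. optalg = opt_conjgrad_pr */
      return 1;                             /* matched */
    }
  return 0;                                 /* unknown name */
}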
Example 10
int main(int argc, char **argv)
{
  /* These variables are for command-line options. */
  double var = 0.25, *x, **J, err;
  int seed = 0, nbasis = 12, points = 200, i;

  /* The OPTION array is used to easily parse command-line options. */
  OPTION opts[] = {
    { "-var",    OPT_DOUBLE, &var,    "variance of basis functions"  },
    { "-seed",   OPT_INT,    &seed,   "random number seed"           },
    { "-nbasis", OPT_INT,    &nbasis, "number of basis functions"    },
    { "-points", OPT_INT,    &points, "number of data points"        },
    { NULL,      OPT_NULL,   NULL,    NULL                           }
  };

  /* The DATASET and the NN that we will use. */
  DATASET *data;
  NN *nn;

  /* Get the command-line options. */
  get_options(argc, argv, opts, help_string, NULL, 0);

  srandom(seed);

  /* Make the data, and build an rbf from it. */
  data = make_data(points);

  nn = nn_create("2 2");
  nn_link(nn, "0 -q-> 1");
  nn_set_actfunc(nn, 1, 0, "linear");

  nn->info.train_set = data;
  nn->info.opt.min_epochs = 10;
  nn->info.opt.max_epochs = 25;
  nn->info.opt.error_tol = 10e-5;
  nn->info.opt.delta_error_tol = 10e-6;
  nn->info.opt.hook = training_hook;
  nn->info.opt.engine = opt_quasinewton_bfgs;
  nn_train(nn);

  J = allocate_array(2, sizeof(double), 2, 2);
  
  /* Now test to see if nn_jacobian() works. */
  for(i = 0; i < points; i++) {
    x = dataset_x(data, i);
    nn_jacobian(nn, x, &J[0][0]);

#if 0
    printf("% 2.2f\t% 2.2f\t% 2.2f\t% 2.2f\n", nn->x[0], nn->x[1],
	   nn->y[0], nn->y[1]);
    printf("% 2.2f\t% 2.2f\t% 2.2f\t% 2.2f\n", J[0][0], J[0][1],
	   J[1][0], J[1][1]);
    printf("% 2.2f\t% 2.2f\t% 2.2f\t% 2.2f\n", df1dx1(x[0], x[1]),
	   df1dx2(x[0], x[1]), df2dx1(x[0], x[1]), df2dx2(x[0], x[1]));
    printf("--\n");
#endif
#if 1
    /* squared difference between each Jacobian entry and the matching
     * df*dx*() partial */
    err = J[0][0] - df1dx1(x[0], x[1]);
    err = err * err;
    printf("% 2.2f\t", err);

    err = J[0][1] - df1dx2(x[0], x[1]);
    err = err * err;
    printf("% 2.2f\t", err);

    err = J[1][0] - df2dx1(x[0], x[1]);
    err = err * err;
    printf("% 2.2f\t", err);

    err = J[1][1] - df2dx2(x[0], x[1]);
    err = err * err;
    printf("% 2.2f\n", err);
#endif
  }

  /* Free up everything. */
  deallocate_array(J);
  nn_destroy(nn);
  series_destroy(dataset_destroy(data));
  nn_shutdown();

  exit(0); 
}
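The test above compares nn_jacobian() against hand-coded partials of the generating function. An alternative cross-check, using only nn_forward() and nn->y as in Example 5, is a finite-difference estimate of each Jacobian entry (a sketch; the step h and the fixed 2x2 sizes are illustrative):

/* Finite-difference estimate of J[i][j] = d y_i / d x_j for a 2-in,
 * 2-out network, for comparison against nn_jacobian(). */
static void fd_jacobian(NN *nn, const double *x, double J_fd[2][2])
{
  const double h = 1e-6;
  double xp[2], y0[2];
  int i, j;

  nn_forward(nn, (double *)x);       /* baseline outputs */
  for(i = 0; i < 2; i++)
    y0[i] = nn->y[i];

  for(j = 0; j < 2; j++) {
    xp[0] = x[0]; xp[1] = x[1];
    xp[j] += h;                      /* perturb one input */
    nn_forward(nn, xp);
    for(i = 0; i < 2; i++)
      J_fd[i][j] = (nn->y[i] - y0[i]) / h;
  }
}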
Example 11
int main(int argc, char** argv)
{
    FILE* f;
    metadata_t* meta;
    fp_context_t* context;
    struct sort_config sc;
    dataset_t* ds;
    char target[256];
    stream_t* stream;
    config_t conf;
    external_dataset_t* ext;
    int i; 

    if(argc < 2)
    {
        printf("Usage: %s <dataset-file>\n", argv[0]);
        exit(-1);
    }
    
    if((f = fopen(argv[1], "r")) == NULL)
    {
        printf("Cannot open dataset file %s.\n", argv[1]);
        return -1;
    }
    meta = metadata_read(f);
    sc.verbose = 1;
    sc.normalize = 0;
    sc.denormalize = 0;
    sc.benchmark = 1;
    sc.find_order = ITERATIVE;
    sc.index = KEEP;
    sc.cmp = HILBERT;
    sc.print = stdout;
    context = fp_create_context(&sc, meta->dimz, meta->dimf, meta->start_order);
    ds = dataset_read(f, meta, context);
    fclose(f);
    dataset_print(ds, FALSE);
    
    strcpy(target, argv[1]);
    strcat(target, ".stream");
    printf("Number of records: %d\n", ds->n_records);
    printf("Record size: %d\n", context->record_size);

    conf.memory_size = 1000; 
    conf.block_size = 0x100;
    conf.record_size = context->record_size;
    stream = stream_create(&conf, target);
    stream_open(stream, O_CREAT | O_TRUNC | O_SYNC | O_WRONLY);
    dataset_convert(ds, stream);
    stream_close(stream);
    stream_open(stream, O_SYNC | O_RDONLY);
    ext = external_dataset_create(stream, meta, ds->n_records);

    for(i = 0; i < ds->n_records / MEMORY_RECORDS(stream); i++)
    {
        memory_read(stream, ext->mem->records, MEMORY_RECORDS(stream));
        ext->mem->n_records = MEMORY_RECORDS(stream);
        dataset_print(ext->mem, FALSE);
    }
    if(ds->n_records % MEMORY_RECORDS(stream))
    {
        memory_read(stream, ext->mem->records, ds->n_records % MEMORY_RECORDS(stream));
        ext->mem->n_records = ds->n_records % MEMORY_RECORDS(stream);
        dataset_print(ext->mem, FALSE);
    }
    printf("Stream position: %ld vs. %ld\n", stream->pos, stream_tell(stream));
    external_dataset_sort(ext);

    external_dataset_destroy(ext);
    stream_destroy(stream);
    stats_print();
    dataset_destroy(ds);
    fp_destroy_context(context);
    metadata_destroy(meta);
    return 0;
}
Example 12
dataset_t* dataset_load(const char* filename)
{
    dataset_t* dataset;
    int fd;
    int i;

    if((fd = open(filename, O_RDONLY)) == -1)
    {
        printf("Cannot open input file '%s'\n", filename);
        return NULL;
    }
    
    /* allocate memory for dataset structure */
    dataset = malloc(sizeof(dataset_t));
    if(dataset == NULL)
    {
        printf("Cannot allocate memory for dataset structure.\n");
        close(fd);
        return NULL;
    }
    memset(dataset, 0, sizeof(dataset_t));

    read(fd, &dataset->grid_size, sizeof(int));
    read(fd, &dataset->max_t, sizeof(int));
    read(fd, &dataset->n_trajectories, sizeof(int));
    dataset->trajectories = 
        malloc(dataset->n_trajectories * sizeof(trajectory_t));
    if(dataset->trajectories == NULL)
    {
        printf("Cannot allocate memory for trajectories.\n");
        dataset_destroy(&dataset);
        close(fd);
        return NULL;
    }
    memset(dataset->trajectories, 0, dataset->n_trajectories * sizeof(trajectory_t));
    for(i = 0; i < dataset->n_trajectories; i++)
    {
        read(fd, &dataset->trajectories[i].trajectory_id, sizeof(trajectory_id_t));
        read(fd, &dataset->trajectories[i].n_samples, sizeof(int));
        dataset->trajectories[i].samples = 
            malloc(dataset->trajectories[i].n_samples * sizeof(sample_t));
        if(dataset->trajectories[i].samples == NULL)
        {
            printf("Cannot allocate memory for samples of trajectory: %d\n",
                   dataset->trajectories[i].trajectory_id);
            dataset_destroy(&dataset);
            close(fd);
            return NULL;
        }
        read(fd, dataset->trajectories[i].samples, 
             sizeof(sample_t) * dataset->trajectories[i].n_samples);
    }
    
    /* load all known groups */
    read(fd, &dataset->n_groups, sizeof(int));
    dataset->groups = malloc(sizeof(group_t) * dataset->n_groups);
    if(dataset->groups == NULL)
    {
        printf("Cannot allocate memory for group structures.\n");
        dataset_destroy(&dataset);
        close(fd);
        return NULL;
    }
    memset(dataset->groups, 0, sizeof(group_t) * dataset->n_groups);
    for(i = 0; i < dataset->n_groups; i++)
    {
        read(fd, &dataset->groups[i].group_id, sizeof(group_id_t));
        read(fd, &dataset->groups[i].n_trajectories, sizeof(int));
        dataset->groups[i].trajectories = 
            malloc(sizeof(trajectory_id_t) * dataset->groups[i].n_trajectories);
        if(dataset->groups[i].trajectories == NULL)
        {
            printf("Cannot allocate memory for trajectory ID list for "
                   "group: %d\n", dataset->groups[i].group_id);
            dataset_destroy(&dataset);
            close(fd);
            return NULL;
        }
        read(fd, dataset->groups[i].trajectories, 
             sizeof(trajectory_id_t) * dataset->groups[i].n_trajectories);
    }

    close(fd);
    return dataset;
}
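One caveat with the code above: every read() ignores its return value, so a truncated file fills the structures with silent garbage. A small wrapper in the same spirit (a sketch, not part of the original code) that loops until the requested byte count arrives:

#include <errno.h>
#include <unistd.h>

/* Read exactly `count` bytes or fail; a bare read() may return less. */
static int read_exact(int fd, void* buf, size_t count)
{
    char* p = buf;

    while(count > 0)
    {
        ssize_t n = read(fd, p, count);
        if(n < 0 && errno == EINTR)
            continue;               /* interrupted: just retry */
        if(n <= 0)
            return -1;              /* error or unexpected end of file */
        p += n;
        count -= (size_t)n;
    }
    return 0;
}

Each read(fd, ...) in dataset_load() could then become a checked call such as: if(read_exact(fd, &dataset->grid_size, sizeof(int)) != 0) { dataset_destroy(&dataset); close(fd); return NULL; }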