Example #1
/* Assumed context for this snippet: the standard headers it clearly uses,
   plus the project's own neural-net header (the name "nn.h" is assumed). */
#include <stdio.h>
#include <stdlib.h>
#include "nn.h"

int main(void)
{
    int i;
    int cyc;

    nn_sys_t nnins;
    nn_init(&nnins, 2, 7, 1, 100, 0.5, 0.00001);
    init_teacher_vect(&nnins);

    for (cyc = 0; cyc < 30000; cyc++) {
        float e = 0.0;
        for (i = 0; i < nnins.teacher_n; i++) {
            nn_bp(&nnins, i);
            e += nn_calcerr(&nnins, i);
        }

        fprintf(stderr, "e:%f\r", e);
    }

    for (i = 0; i < 10000; i++) {
        float rx = (float)rand() / RAND_MAX;
        float ry = (float)rand() / RAND_MAX;
        float inputarr[2] = {rx, ry};
        nn_compute(&nnins, inputarr);
        printf("#XY %f %f %f %f\n", rx, ry, nnins.output[0], nnins.output[0] - rx * rx / 2 - ry * ry / 2);
    }

    printf("\n");

    nn_free(&nnins);
    return 0;
}
Example #2
File: test.c Project: dhowden/bsdnt
/* Allocate n usable words with REDZONE_WORDS guard words on each side,
   filling both guard zones with REDZONE_BYTE so that out-of-bounds
   writes can be detected afterwards. */
nn_t alloc_redzoned_nn(len_t n)
{
   nn_t a = nn_init(n + 2*REDZONE_WORDS);
   char * redzone1 = (char *) a;
   char * redzone2 = (char *) (a + REDZONE_WORDS + n);
   long i;

   for (i = 0; i < REDZONE_WORDS*sizeof(word_t); i++)
   {
      redzone1[i] = REDZONE_BYTE;
      redzone2[i] = REDZONE_BYTE;
   }
  
   return a + REDZONE_WORDS;
}
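
A redzoned allocation like this one is normally paired with a release routine that verifies the guard bytes before freeing the block. The sketch below is a hypothetical counterpart for illustration only: the name free_redzoned_nn, the extra len_t parameter, and the use of assert() and plain free() are assumptions, not taken from the bsdnt source above.

void free_redzoned_nn(nn_t a, len_t n)
{
   nn_t base = a - REDZONE_WORDS;  /* pointer originally returned by nn_init */
   char * redzone1 = (char *) base;
   char * redzone2 = (char *) (a + n);
   long i;

   /* Any guard byte that no longer equals REDZONE_BYTE means the code
      under test wrote outside its n-word buffer. (Needs <assert.h>.) */
   for (i = 0; i < REDZONE_WORDS*sizeof(word_t); i++)
   {
      assert(redzone1[i] == REDZONE_BYTE);
      assert(redzone2[i] == REDZONE_BYTE);
   }

   free(base);
}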
Example #3
/* Lazily load libnotify via GModule on first use, resolve the entry
 * points we need, and show a notification with the given title/text. */
static gboolean
libnotify_notify_new (const char *title, const char *text, GtkStatusIcon *icon)
{
	void *noti;

	if (!nn_mod)
	{
		nn_mod = g_module_open ("libnotify", G_MODULE_BIND_LAZY);
		if (!nn_mod)
		{
			nn_mod = g_module_open ("libnotify.so.1", G_MODULE_BIND_LAZY);
			if (!nn_mod)
				return FALSE;
		}

		if (!g_module_symbol (nn_mod, "notify_init", (gpointer)&nn_init))
			goto bad;
		if (!g_module_symbol (nn_mod, "notify_uninit", (gpointer)&nn_uninit))
			goto bad;
		if (!g_module_symbol (nn_mod, "notify_notification_new_with_status_icon", (gpointer)&nn_new_with_status_icon))
			goto bad;
		if (!g_module_symbol (nn_mod, "notify_notification_new", (gpointer)&nn_new))
			goto bad;
		if (!g_module_symbol (nn_mod, "notify_notification_show", (gpointer)&nn_show))
			goto bad;
		if (!g_module_symbol (nn_mod, "notify_notification_set_timeout", (gpointer)&nn_set_timeout))
			goto bad;
		if (!nn_init (PACKAGE_NAME))
			goto bad;
	}

	text = strip_color (text, -1, STRIP_ALL|STRIP_ESCMARKUP);
	title = strip_color (title, -1, STRIP_ALL);
	noti = nn_new (title, text, XCHATSHAREDIR"/pixmaps/xchat.png", NULL);
	g_free ((char *)title);
	g_free ((char *)text);

	nn_set_timeout (noti, prefs.input_balloon_time*1000);
	nn_show (noti, NULL);
	g_object_unref (G_OBJECT (noti));

	return TRUE;

bad:
	g_module_close (nn_mod);
	nn_mod = NULL;
	return FALSE;
}
Example #4
int main(int argc, char **argv)
{
  /* These variables are for command-line options.
   */
  double mag = 1.0, etol = 10e-3, detol = 10e-8, rate = 0.1;
  int seed = 0, minepochs = 10, maxepochs = 100;
  char *afunc = "tanh", *alg = "cgpr", *srch = "cubic";

  /* The OPTION array is used to easily parse command-line options.
   */
  OPTION opts[] = {
    { "-seed",      OPT_INT,    &seed,      "random number seed"           },
    { "-minepochs", OPT_INT,    &minepochs, "minimum # of training steps"  },
    { "-maxepochs", OPT_INT,    &maxepochs, "maximum # of training steps"  },
    { "-afunc",     OPT_STRING, &afunc,     "act. function for hidden node"},
    { "-mag",       OPT_DOUBLE, &mag,       "max size of initial weights"  },
    { "-etol",      OPT_DOUBLE, &etol,      "error tolerance"              },
    { "-detol",     OPT_DOUBLE, &detol,     "delta error tolerance"        },
    { "-rate",      OPT_DOUBLE, &rate,      "learning rate"                },
    { "-alg",       OPT_STRING, &alg,       "training algorithm"           },
    { "-srch",      OPT_STRING, &srch,      "line search"                  },
    { NULL,         OPT_NULL,   NULL,       NULL                           }
  };

  /* The DATASET and the NN that we will use.
   */
  DATASET *data;
  NN *nn;


  /* Get the command-line options.
   */
  get_options(argc, argv, opts, help_string, NULL, 0);

  /* Set the random seed.
   */
  srandom(seed);

  nn = nn_create("4 2 4");   /* 2-2-1 architecture. */
  nn_link(nn, "0 -l-> 1");   /* Inputs to hidden link. */
  nn_link(nn, "1 -l-> 2");   /* Hidden to output link. */

  /* Set the activation functions of the hidden and output layers and
   * initialize the weights to uniform random values between -/+mag.
   */
  nn_set_actfunc(nn, 1, 0, afunc);
  nn_set_actfunc(nn, 2, 0, "logistic");
  nn_init(nn, mag);
 
  /* Convert the C matrix into a DATASET.  There are four inputs, four
   * outputs, and four patterns total.
   */
  data = dataset_create(&dsm_matrix_method,
			dsm_c_matrix(&rawdata[0][0], 4, 4, 4));

  /* Tell the NN how to train itself.
   */
  nn->info.train_set = data;
  nn->info.opt.min_epochs = minepochs;
  nn->info.opt.max_epochs = maxepochs;
  nn->info.opt.error_tol = etol;
  nn->info.opt.delta_error_tol = detol;
  nn->info.opt.hook = training_hook;
  nn->info.opt.rate = rate;

  if(strcmp(srch, "hybrid") == 0)
    nn->info.opt.stepf = opt_lnsrch_hybrid;
  else if(strcmp(srch, "golden") == 0)
    nn->info.opt.stepf = opt_lnsrch_golden;
  else if(strcmp(srch, "cubic") == 0)
    nn->info.opt.stepf = opt_lnsrch_cubic;
  else if(strcmp(srch, "none") == 0)
    nn->info.opt.stepf = NULL;
  
  if(strcmp(alg, "cgpr") == 0)
    nn->info.opt.engine = opt_conjgrad_pr;
  else if(strcmp(alg, "cgfr") == 0)
    nn->info.opt.engine = opt_conjgrad_fr;
  else if(strcmp(alg, "qndfp") == 0)
    nn->info.opt.engine = opt_quasinewton_dfp;
  else if(strcmp(alg, "qnbfgs") == 0)
    nn->info.opt.engine = opt_quasinewton_bfgs;
  else if(strcmp(alg, "lm") == 0)
    nn->info.opt.engine = opt_levenberg_marquardt;
  else if(strcmp(alg, "bp") == 0) {
    nn->info.opt.engine = opt_gradient_descent;
    nn->info.opt.stepf = NULL;
    nn->info.subsample = 1;
    nn->info.opt.stepf = nn_lnsrch_search_then_converge;
    nn->info.opt.momentum = 0.9;
    nn->info.stc_eta_0 = 1;
    nn->info.stc_tau = 100;
  }

  /* Do the training.  This will print out the epoch number and
   * the error level until training halts via one of the stopping
   * criteria.
   */
  nn_train(nn);

  /* Print out each input training pattern and the respective
   * NN output.
   */
  printf("--------------------\n");
  nn_offline_test(nn, data, testing_hook);

#if 1
  { 
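    /* Comment added for clarity: this block cross-checks the analytic
       R-propagation gradients.  For each training pattern k and weight j,
       Rg[j] from nn_get_Rgrads() is compared with the finite-difference
       estimate (jj2 - jj1) / dw obtained by nudging weight j by dw, and
       each pair is reported as GOOD or BAD. */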
    const double dw = 0.000001;
    double jj1, jj2, *Rg, Rin[4], Rdout[4], dedy[4], err;
    int j, k, l, n = nn->numweights;
    Rg = allocate_array(1, sizeof(double), nn->numweights);
    nn->need_all_grads = 1;
    for(k = 0; k < 4; k++) {
      
      nn_forward(nn, &rawdata[k][0]);
      for(l = 0; l < nn->numout; l++)
	dedy[l] = nn->y[l] - rawdata[k][l];
      nn_backward(nn, dedy);
      for(l = 0; l < nn->numout; l++)
	/* Fixed */
	Rin[l] =  nn->dx[l] - dedy[l];

      nn_Rforward(nn, Rin, NULL);
      for(l = 0; l < nn->numout; l++)
	/* Fixed */
        Rdout[l] = nn->Ry[l] - nn->Rx[l];

      nn_Rbackward(nn, Rdout);
      nn_get_Rgrads(nn, Rg);

      for(j = 0; j < n; j++) {
	nn_forward(nn, &rawdata[k][0]);
	for(l = 0; l < nn->numout; l++)
	  dedy[l] = nn->y[l] - rawdata[k][l];
	nn_backward(nn, dedy);
	jj1 = 0;
	for(l = 0; l < nn->numout; l++)
	  jj1 += 0.5 * (dedy[l] - nn->dx[l]) * (dedy[l] - nn->dx[l]);

	*nn->weights[j] += dw;
	nn_forward(nn, &rawdata[k][0]);
	for(l = 0; l < nn->numout; l++)
	  dedy[l] = nn->y[l] - rawdata[k][l];
	nn_backward(nn, dedy);
	jj2 = 0;
	for(l = 0; l < nn->numout; l++)
	  jj2 += 0.5 * (dedy[l] - nn->dx[l]) * (dedy[l] - nn->dx[l]);
	err = fabs(Rg[j] - (jj2 - jj1) / dw) / fabs(Rg[j]);
	printf("(%d, %2d) ja = % .5e  jn = % .5e  error = % .2e  %s\n",
	       k, j, Rg[j], (jj2 - jj1) / dw,
	       err, (err > 10e-4) ? "BAD" : "GOOD");
	*nn->weights[j] -= dw;
      }
    }
  }
#endif

  /* Free up everything.
   */
  nn_destroy(nn);
  dsm_destroy_matrix(dataset_destroy(data));
  nn_shutdown();

  /* Bye.
   */
  exit(0); 
}
Example #5
void RemoveApplication(nsINIParser& parser, const char* curExeDir, const char* profile) {
  if (!isProfileOverridden) {
    // Remove the desktop entry file.
    char desktopEntryFilePath[MAXPATHLEN];

    char* dataDir = getenv("XDG_DATA_HOME");

    if (dataDir && *dataDir) {
      snprintf(desktopEntryFilePath, MAXPATHLEN, "%s/applications/owa-%s.desktop", dataDir, profile);
    } else {
      char* home = getenv("HOME");
      snprintf(desktopEntryFilePath, MAXPATHLEN, "%s/.local/share/applications/owa-%s.desktop", home, profile);
    }

    unlink(desktopEntryFilePath);
  }

  // Remove the files from the installation directory.
  char webAppIniPath[MAXPATHLEN];
  snprintf(webAppIniPath, MAXPATHLEN, "%s/%s", curExeDir, kWEBAPP_INI);
  unlink(webAppIniPath);

  char curExePath[MAXPATHLEN];
  snprintf(curExePath, MAXPATHLEN, "%s/%s", curExeDir, kAPP_RT);
  unlink(curExePath);

  char webAppJsonPath[MAXPATHLEN];
  snprintf(webAppJsonPath, MAXPATHLEN, "%s/%s", curExeDir, kWEBAPP_JSON);
  unlink(webAppJsonPath);

  char iconPath[MAXPATHLEN];
  snprintf(iconPath, MAXPATHLEN, "%s/icon.png", curExeDir);
  unlink(iconPath);

  char appName[MAXPATHLEN];
  if (NS_FAILED(parser.GetString("Webapp", "Name", appName, MAXPATHLEN))) {
    strcpy(appName, profile);
  }

  char uninstallMsg[MAXPATHLEN];
  if (NS_SUCCEEDED(parser.GetString("Webapp", "UninstallMsg", uninstallMsg, MAXPATHLEN))) {
    /**
     * The only difference between libnotify.so.4 and libnotify.so.1 for these symbols
     * is that notify_notification_new takes three arguments in libnotify.so.4 and
     * four in libnotify.so.1.
     * Passing the fourth argument as NULL is binary compatible.
     */
    typedef void  (*notify_init_t)(const char*);
    typedef void* (*notify_notification_new_t)(const char*, const char*, const char*, const char*);
    typedef void  (*notify_notification_show_t)(void*, void**);

    void *handle = dlopen("libnotify.so.4", RTLD_LAZY);
    if (!handle) {
      handle = dlopen("libnotify.so.1", RTLD_LAZY);
      if (!handle)
        return;
    }

    notify_init_t nn_init = (notify_init_t)(uintptr_t)dlsym(handle, "notify_init");
    notify_notification_new_t nn_new = (notify_notification_new_t)(uintptr_t)dlsym(handle, "notify_notification_new");
    notify_notification_show_t nn_show = (notify_notification_show_t)(uintptr_t)dlsym(handle, "notify_notification_show");
    if (!nn_init || !nn_new || !nn_show) {
      dlclose(handle);
      return;
    }

    nn_init(appName);

    void* n = nn_new(uninstallMsg, NULL, "dialog-information", NULL);

    nn_show(n, NULL);

    dlclose(handle);
  }
}
Example #6
int main(int argc, char **argv)
{
  /* These variables are for command-line options.
   */
  double mag = 0.1, etol = 10e-3, detol = 10e-8;
  int seed = 0, minepochs = 10, maxepochs = 100;
  char *afunc = "tanh";

  /* The OPTION array is used to easily parse command-line options.
   */
  OPTION opts[] = {
    { "-seed",      OPT_INT,    &seed,      "random number seed"           },
    { "-minepochs", OPT_INT,    &minepochs, "minimum # of training steps"  },
    { "-maxepochs", OPT_INT,    &maxepochs, "maximum # of training steps"  },
    { "-afunc",     OPT_STRING, &afunc,     "act. function for hidden node"},
    { "-mag",       OPT_DOUBLE, &mag,       "max size of initial weights"  },
    { "-etol",      OPT_DOUBLE, &etol,      "error tolerance"              },
    { "-detol",     OPT_DOUBLE, &detol,     "delta error tolerance"        },
    { NULL,         OPT_NULL,   NULL,       NULL                           }
  };

  /* The DATASET and the NN that we will use.
   */
  DATASET *data;
  NN *nn;

  /* Set it so that xalloc_report() will print to the screen.
   */
  ulog_threshold = ULOG_DEBUG;
  
  /* Get the command-line options.
   */
  get_options(argc, argv, opts, "Train a NN on XOR data.\n");

  /* Set the random seed.
   */
  srandom(seed);

  /* Create the neural network.  This one has two inputs, one hidden node,
   * and a single output.  The inputs are connected to the hidden node
   * and to the outputs, while the hidden node is connected only to the
   * outputs.
   */
  nn = nn_create("2 1 1");   /* 2-1-1 architecture. */
  nn_link(nn, "0 -l-> 1");   /* Inputs to hidden link. */
  nn_link(nn, "1 -l-> 2");   /* Hidden to output link. */
  nn_link(nn, "0 -l-> 2");   /* Input to output short-circuit link. */  

  /* Set the activation functions of the hidden and output layers and
   * initialize the weights to uniform random values between -/+mag.
   */
  nn_set_actfunc(nn, 1, 0, afunc);
  nn_set_actfunc(nn, 2, 0, "logistic");
  nn_init(nn, mag);
 
  /* Convert the C matrix into a DATASET.  There are two inputs, one
   * output, and four patterns total.
   */
  data = dataset_create(&dsm_matrix_method,
			dsm_c_matrix(&xor_data[0][0], 2, 1, 4));

  /* Tell the NN how to train itself.
   */
  nn->info.train_set = data;
  nn->info.opt.min_epochs = minepochs;
  nn->info.opt.max_epochs = maxepochs;
  nn->info.opt.error_tol = etol;
  nn->info.opt.delta_error_tol = detol;

  nn_train(nn);
  nn_offline_test(nn, data, NULL);

  nn_write(nn, "xor.net");
  nn_destroy(nn);
  nn = nn_read("xor.net");
  nn_destroy(nn);
  unlink("xor.net");

  dsm_destroy_matrix(dataset_destroy(data));
  nn_shutdown();

  xalloc_report();

  /* Bye.
   */
  exit(0); 
}
Example #7
int main(int argc, char **argv)
{
  /* These variables are for command-line options. */
  double noise = 0.0;
  int seed = 0, nbasis = 4, points = 100;

  /* The OPTION array is used to easily parse command-line options. */
  OPTION opts[] = {
    { "-noise",  OPT_DOUBLE, &noise,  "variance of Gaussian noise"   },
    { "-seed",   OPT_INT,    &seed,   "random number seed"           },
    { "-nbasis", OPT_INT,    &nbasis, "number of basis functions"    },
    { "-points", OPT_INT,    &points, "number of data points"        },
    { NULL,      OPT_NULL,   NULL,    NULL                           }
  };

  /* The DATASET and the NN that we will use. */
  DATASET *data;
  NN *nn;

  /* Get the command-line options.  */
  get_options(argc, argv, opts, help_string, NULL, 0);
  srandom(seed);

  /* Make the data, and build a CNLS net. */
  data = make_data(points, noise);
  nn = nn_create("2 (%d %d) %d 1", nbasis, nbasis, nbasis);
  nn_set_actfunc(nn, 1, 0, "linear");
  nn_set_actfunc(nn, 1, 1, "exp(-x)");
  nn_set_actfunc(nn, 2, 0, "linear");
  nn_set_actfunc(nn, 3, 0, "linear");

  nn_link(nn, "0 -l-> (1 0)");
  nn_link(nn, "0 -e-> (1 1)");
  nn_link(nn, "(1 1) -l-> 3");
  nn_link(nn, "(1 0) (1 1) -p-> 2");
  nn_link(nn, "2 -l-> 3");

  nn_init(nn, 1);

  nn->info.train_set = data;
  nn->info.opt.min_epochs = 10;
  nn->info.opt.max_epochs = 100;
  nn->info.opt.error_tol = 1e-5;
  nn->info.opt.delta_error_tol = 1e-7;
  nn->info.opt.hook = training_hook;
  nn_train(nn);

  /* Now, let's see how well the NN performs.
   */
  nn_offline_test(nn, data, testing_hook);

  /* Free up everything.
   */
  nn_destroy(nn);
  series_destroy(dataset_destroy(data));
  nn_shutdown();

  /* Bye.
   */
  exit(0); 
}
Example #8
int main(int argc, char **argv)
{
  /* These variables are for command-line options.
   */
  double mag = 1.0, etol = 10e-3, detol = 10e-8;
  double rate = 0.1, moment = 0.9, subsamp = 0, decay = 0.9;
  int seed = 0, minepochs = 10, maxepochs = 100;
  char *afunc = "tanh";
  void *linealg = opt_lnsrch_golden, *optalg = opt_conjgrad_pr;

  OPTION_SET_MEMBER optsetm[] = {
    { "cgpr",   opt_conjgrad_pr },
    { "cgfr",   opt_conjgrad_fr },
    { "qndfp",  opt_quasinewton_dfp },
    { "qnbfgs", opt_quasinewton_bfgs },
    { "lm",     opt_levenberg_marquardt },
    { "gd",     opt_gradient_descent },
    { NULL,     NULL }
  };

  OPTION_SET_MEMBER linesetm[] = {
    { "golden", opt_lnsrch_golden },
    { "hybrid", opt_lnsrch_hybrid },
    { "cubic",  opt_lnsrch_cubic },
    { "stc",    nn_lnsrch_search_then_converge },
    { "none",   NULL },
    { NULL,     NULL }
  };

  OPTION_SET lineset = { &linealg, linesetm };
  OPTION_SET optset = { &optalg, optsetm };
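  /* Comment added: each OPTION_SET pairs a destination pointer with a
     keyword table, so get_options() can presumably translate the -alg and
     -srch strings into the function pointers stored in optalg / linealg,
     which are installed into the NN below. */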
    
  /* The OPTION array is used to easily parse command-line options.
   */
  OPTION opts[] = {
    { "-seed",      OPT_INT,    &seed,      "random number seed"           },
    { "-minepochs", OPT_INT,    &minepochs, "minimum # of training steps"  },
    { "-maxepochs", OPT_INT,    &maxepochs, "maximum # of training steps"  },
    { "-afunc",     OPT_STRING, &afunc,     "act. function for hidden node"},
    { "-mag",       OPT_DOUBLE, &mag,       "max size of initial weights"  },
    { "-etol",      OPT_DOUBLE, &etol,      "error tolerance"              },
    { "-detol",     OPT_DOUBLE, &detol,     "delta error tolerance"        },
    { "-rate",      OPT_DOUBLE, &rate,      "learning rate"                },
    { "-moment",    OPT_DOUBLE, &moment,    "momentum rate"                },
    { "-alg",       OPT_SET,    &optset,    "training algorithm"           },
    { "-subsamp",   OPT_DOUBLE, &subsamp,   "subsample value"  },
    { "-decay",     OPT_DOUBLE, &decay,     "stochastic decay"  },
    { "-srch",      OPT_SET,    &lineset,   "line search" },
    { NULL,         OPT_NULL,   NULL,       NULL                           }
  };

  /* The DATASET and the NN that we will use.
   */
  DATASET *data;
  NN *nn;

  /* Get the command-line options.
   */
  get_options(argc, argv, opts, help_string, NULL, 0);

  /* Set the random seed.
   */
  srandom(seed);

  /* Create the neural network.  This one has two inputs, one hidden node,
   * and a single output.  The inputs are connected to the hidden node
   * and to the outputs, while the hidden node is connected only to the
   * outputs.
   */
  nn = nn_create("2 1 1");   /* 2-1-1 architecture. */
  nn_link(nn, "0 -l-> 1");   /* Inputs to hidden link. */
  nn_link(nn, "1 -l-> 2");   /* Hidden to output link. */
  nn_link(nn, "0 -l-> 2");   /* Input to output short-circuit link. */  

  /* Set the activation functions of the hidden and output layers and
   * initialize the weights to uniform random values between -/+mag.
   */
  nn_set_actfunc(nn, 1, 0, afunc);
  nn_set_actfunc(nn, 2, 0, "logistic");
  nn_init(nn, mag);
 
  /* Convert the C matrix into a DATASET.  There are two inputs, one
   * output, and four patterns total.
   */
  data = dataset_create(&dsm_matrix_method,
			dsm_c_matrix(&xor_data[0][0], 2, 1, 4));

  /* Tell the NN how to train itself.
   */
  nn->info.train_set = data;
  nn->info.opt.min_epochs = minepochs;
  nn->info.opt.max_epochs = maxepochs;
  nn->info.opt.error_tol = etol;
  nn->info.opt.delta_error_tol = detol;
  nn->info.opt.hook = training_hook;
  nn->info.opt.rate = rate;
  nn->info.opt.momentum = moment;
  nn->info.opt.decay = decay;
  nn->info.subsample = subsamp;
  if(subsamp != 0) {
    nn->info.subsample = subsamp;
    nn->info.opt.stochastic = 1;
  }
  nn->info.opt.stepf = linealg;
  nn->info.opt.engine = optalg;
  nn->info.stc_eta_0 = 1;
  nn->info.stc_tau = 100;


  /* Do the training.  This will print out the epoch number and
   * the error level until training halts via one of the stopping
   * criteria.
   */
  nn_train(nn);
  nn->info.subsample = 0;

  /* Print out each input training pattern and the respective
   * NN output.
   */
  printf("--------------------\n");
  nn_offline_test(nn, data, testing_hook);

  /* Free up everything.
   */
  nn_destroy(nn);
  dsm_destroy_matrix(dataset_destroy(data));
  nn_shutdown();

  /* Bye.
   */
  exit(0); 
}
Example #9
int main( int argc, char *argv[] )
{

#if 0
    sample_t smp;
    int nin, nout;
    load_sample( "test.smpl", &smp, &nin, &nout ); 
    printf( "%i %i\n", nin, nout );
    for ( int i = 0; i < nin; i++ )
    {
        printf( "in[%i]=%g\n", i, smp.input[i] ); 
    }
    for ( int i = 0; i < nout; i++ )
    {
        printf( "out[%i]=%g\n", i, smp.output[i] ); 
    }
    return 0;
#endif
    // set up sample
    sample_t s[4];
    s[0].input = new double[2];
    s[1].input = new double[2];
    s[2].input = new double[2];
    s[3].input = new double[2];
    s[0].output = new double[1];
    s[1].output = new double[1];
    s[2].output = new double[1];
    s[3].output = new double[1];

    s[0].input[0] = 0;
    s[0].input[1] = 0;
    s[0].output[0] =  0 ;

    s[1].input[0] = 0;
    s[1].input[1] = 1;
    s[1].output[0] =  1 ;

    s[2].input[0] = 1;
    s[2].input[1] = 0;
    s[2].output[0] =  1 ;

    s[3].input[0] = 1;
    s[3].input[1] = 1;
    s[3].output[0] =  0 ;

    srand( RSEED );
    neural_network_t nn;

    //////////////////////////////////////////
    // NB: massive increase in effectiveness
    // of nn when switching from logistic 
    // activation function for output layer
    // to linear!
    //////////////////////////////////////////
    nn_init( 2,	// input
            1,                        // output
            6,                       // hidden
            &logistic_func,
            &logistic_func_deriv,
            //&logistic_func,           
            //&logistic_func_deriv,
            &linear_func,
            &linear_func_deriv,
            1.0,
            &nn );


    double tol = .0001;
    // train samples
    nn_train_samples( &nn, s, 4, 1000000, 1, &tol );

    nn_eval_sample( &nn, s[0] );
    dbg_print_nn( &nn );
    nn_eval_sample( &nn, s[1] );
    dbg_print_nn( &nn );
    nn_eval_sample( &nn, s[2] );
    dbg_print_nn( &nn );
    nn_eval_sample( &nn, s[3] );
    dbg_print_nn( &nn );


    nn_free( &nn );
    return 0;
}