Example #1
static LayerNet *read_header ( FILE *fp )
{
   int model, lin, nin, nh1, nh2, nout, outmod ;
   struct wt_header1 h1 ;
   LayerNet *net ;

   if (! fread ( &h1 , sizeof(h1) , 1 , fp ))
      return NULL ;

   if (strcmp ( h1.id , "MLFN WEIGHT FILE" )) {
      error_message ( "This is not a MLFN WEIGHT file." ) ;
      return NULL ;
      }

   model = h1.model ;
   lin = h1.linear ;
   nin = h1.n_in ;
   nh1 = h1.n_hid1 ;
   nh2 = h1.n_hid2 ;
   nout = h1.n_out ;
   outmod = h1.outmodel ;

   MEMTEXT ( "WT_SAVE: new network for restore" ) ;
   net = new LayerNet ( model , outmod , lin , nin , nh1 , nh2 , nout , 1 , 0 );

   if ((net == NULL)  ||  (! net->ok)) {  // Malloc failure?
      memory_message ( "to create network." ) ;
      if (net != NULL)
         delete net ;
      return NULL ;
      }

   return net ;
}
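
For reference, a minimal sketch of the matching save side is shown below. It assumes the wt_header1 layout implied by the reads above (an id string plus int fields) and that the LayerNet members used here are accessible from this point; the actual wt_save routine in the original source may differ.

static int write_header ( LayerNet *net , FILE *fp )
{
   struct wt_header1 h1 ;

   memset ( &h1 , 0 , sizeof(h1) ) ;           // Zero any padding for a clean file
   strcpy ( h1.id , "MLFN WEIGHT FILE" ) ;     // Magic string checked by read_header
   h1.model = net->model ;
   h1.linear = net->outlin ;
   h1.n_in = net->nin ;
   h1.n_hid1 = net->nhid1 ;
   h1.n_hid2 = net->nhid2 ;
   h1.n_out = net->nout ;
   h1.outmodel = net->outmod ;

   return fwrite ( &h1 , sizeof(h1) , 1 , fp ) == 1 ;  // 1 on success, 0 on failure
}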
Example #2
void LayerNet::an1 ( TrainingSet *tptr , struct LearnParams *lptr )
{
   int itry, user_quit ;
   long seed ;
   double best_err ;
   char msg[80] ;
   LayerNet *worknet, *bestnet ;

/*
   Allocate scratch memory
*/

   MEMTEXT ( "AN1::learn new worknet, bestnet" ) ;
   worknet = new LayerNet ( model , outmod , outlin , nin , nhid1 , nhid2 ,
                            nout , 0 , 0 ) ;
   bestnet = new LayerNet ( model , outmod , outlin , nin , nhid1 , nhid2 ,
                            nout , 0 , 1 ) ;

   if ((worknet == NULL)  ||  (! worknet->ok)
    || (bestnet == NULL)  ||  (! bestnet->ok)) {
      memory_message ( "to learn" ) ;
      if (worknet != NULL)
         delete worknet ;
      if (bestnet != NULL)
         delete bestnet ;
      errtype = 0 ;
      return ;
      }

   best_err = 1.e30 ;
   for (itry=1 ; itry<=lptr->retries+1 ; itry++) {

      user_quit = anneal1 ( tptr , lptr , worknet , 1 , itry ) ;
      if (neterr < best_err) {
         best_err = neterr ;
         copy_weights ( bestnet , this ) ;
         }

      sprintf ( msg , "Try %d  err=%lf  best=%lf", itry, neterr, best_err ) ;
      normal_message ( msg ) ;

      if (user_quit  ||  (neterr < lptr->quit_err))
         break ;

      seed = flrand() - (long) (itry * 97) ;   // Ensure a new seed for anneal
      sflrand ( seed ) ;
      zero_weights () ;  // Retry random
      }

   copy_weights ( this , bestnet ) ;
   neterr = best_err ;
   MEMTEXT ( "AN1::learn delete worknet, bestnet" ) ;
   delete worknet ;
   delete bestnet ;
   return ;
}
Example #3
void Network::save_confusion ( char *name )
{
   int i ;
   char *msg ;
   FILE *fp ;

   MEMTEXT ( "CONFUSE:save msg" ) ;
   if ((msg = (char *) MALLOC ( (nout+1) * 5 + 1 )) == NULL ) {
      memory_message ( "to SAVE CONFUSION" ) ;
      return ;
      }

/*
   Open the file to which confusion will be written.
   If it already exists, write a newline at its end.
*/

   fp = fopen ( name , "rt" ) ;
   if (fp != NULL) {
      i = 1 ;
      fclose ( fp ) ;
      }
   else
      i = 0 ;

   if ((fp = fopen ( name , "at" )) == NULL) {
      error_message ( "Cannot open SAVE CONFUSION file" ) ;
      FREE ( msg ) ;
      return ;
      }

   if (i)
      fprintf ( fp , "\n" ) ;

/*
   Write confusion
*/

   for (i=0 ; i<nout ; i++)
      sprintf ( msg+5*i , "%5d" , confusion[i] ) ;
   sprintf ( msg+5*nout , "%5d", confusion[nout] ) ;
   msg[5*nout+5] = 0 ;

   fprintf ( fp , "%s", msg ) ;
   fclose ( fp ) ;
   FREE ( msg ) ;
   return ;
}
Example #4
static Network *read_header ( FILE *fp , int *net_model )
{
   int i, nin, nh1, nh2, nout, outmod, norml ;
   struct wt_header1 h1 ;
   Network *net ;
   KohParams kp ;

   if (! fread ( &h1 , sizeof(h1) , 1 , fp ))
      return NULL ;

   if (strcmp ( h1.id , "NEURAL WEIGHT FILE" )) {
      error_message ( "This is not a NEURAL WEIGHT file." ) ;
      return NULL ;
      }

   *net_model = h1.model ;
   nin = h1.n_in ;
   nh1 = h1.n_hid1 ;
   nh2 = h1.n_hid2 ;
   nout = h1.n_out ;
   outmod = h1.outmodel ;
   kp.normalization = h1.normal ;

   MEMTEXT ( "WT_SAVE: new network for restore" ) ;
   if (*net_model == NETMOD_LAYER)
      net = new LayerNet ( outmod , nin , nh1 , nh2 , nout , 1 , 0 ) ;
   else if (*net_model == NETMOD_KOH)
      net = new KohNet ( nin , nout , &kp , 1 , 0 ) ;
   else if (*net_model == NETMOD_HOP)
      net = new HopNet (nin,nout, 1,0);
   else {
      error_message ( "WEIGHT file specified illegal network model" ) ;
      return NULL ;
      }

   if ((net == NULL)  ||  (! net->ok)) {  // Malloc failure?
      memory_message ( "to create network." ) ;
      if (net != NULL)
	 delete net ;
      return NULL ;
      }

   return net ;
}
Example #5
static PNNet *read_header ( FILE *fp )
{
   int model, nin, nout, outmod, kernl, mom ;
   struct wt_header1 h1 ;
   PNNet *net ;

   if (! fread ( &h1 , sizeof(h1) , 1 , fp ))
      return NULL ;

   if (strcmp ( h1.id , "PNN NETWORK FILE" )) {
      error_message ( "This is not a PNN NETWORK file." ) ;
      return NULL ;
      }

   model = h1.model ;
   kernl = h1.kernel ;
   mom = h1.maxmom ;
   nin = h1.n_in ;
   nout = h1.n_out ;
   outmod = h1.outmodel ;

   MEMTEXT ( "WT_SAVE: new network for restore" ) ;
   if (model == NETMOD_BASIC)
      net = new PNNbasic ( kernl , outmod , nin , nout ) ;
   else if (model == NETMOD_SEPVAR)
      net = new PNNsepvar ( kernl , outmod , nin , nout ) ;
   else if (model == NETMOD_SEPCLASS)
      net = new PNNsepclass ( kernl , outmod , nin , nout ) ;
   else if (model == NETMOD_GCNN)
      net = new GCNN ( mom , 0 , outmod , nin , nout ) ;
   else if (model == NETMOD_GCNN_EW)
      net = new GCNN ( mom , 1 , outmod , nin , nout ) ;
   else {                                 // Unknown model: do not use net uninitialized
      error_message ( "WEIGHT file specified illegal network model" ) ;
      return NULL ;
      }

   if ((net == NULL)  ||  (! net->ok)) {  // Malloc failure?
      memory_message ( "to create network." ) ;
      if (net != NULL)
         delete net ;
      return NULL ;
      }

   return net ;
}
Example #6
void Network::show_confusion ()
{
   int i ;
   char *msg ;

   MEMTEXT ( "CONFUSE:show msg" ) ;
   if ((msg = (char *) MALLOC ( (nout+1) * 5 + 11 )) == NULL ) {
      memory_message ( "to SHOW CONFUSION" ) ;
      return ;
      }

   strcpy ( msg , "Confusion:" ) ;
   for (i=0 ; i<nout ; i++)
      sprintf ( msg+5*i+10 , "%5d" , confusion[i] ) ;
   sprintf ( msg+5*nout+10, "%5d", confusion[nout] ) ;
   msg[5*nout+15] = 0 ;
   normal_message ( msg ) ;
   FREE ( msg ) ;
   return ;
}
Example #7
void Network::classify_from_file ( char *name , double thresh )
{
   int i, maxlin, did_any, best ;
   double *inputs, *iptr, maxact ;
   char msg[81], *line, *lptr ;
   FILE *fp ;

/*
   Open the file which contains the data to be classified
*/

   if ((fp = fopen ( name , "rt" )) == NULL) {
      strcpy ( msg , "Cannot open " ) ;
      strcat ( msg , name ) ;
      error_message ( msg ) ;
      return ;
      }

/*
   Allocate for the file lines as read. Conservatively guess length.
   Also allocate for network input vector.
*/

   maxlin = nin * 20 + 100 ;
   if (maxlin < 1024)
      maxlin = 1024 ;

   MEMTEXT ( "CONFUSE:line, inputs" ) ;
   line = (char *) MALLOC ( maxlin ) ;
   inputs = (double *) MALLOC ( nin * sizeof(double) ) ;

   if ((line == NULL)  ||  (inputs == NULL)) {
      memory_message ( "to classify" ) ;
      fclose ( fp ) ;
      if (line != NULL)
         FREE ( line ) ;
      if (inputs != NULL)
         FREE ( inputs ) ;
      return ;
      }

/*
   Read the file.
*/

   did_any = 0 ;  /* If file runs out on first try, ERROR! */

   for (;;) {  // Endless loop reads until file exhausted

      if ((fgets ( line , maxlin , fp ) == NULL) || (strlen ( line ) < 2)) {
         if ((! did_any)  ||  ferror ( fp )) {
            strcpy ( msg , "Problem reading file " ) ;
            strcat ( msg , name ) ;
            error_message ( msg ) ;
            }
         break ;
         }

      lptr = line ;              // Parse the data from this line
      iptr = inputs ;            // This will be the network inputs
      for (i=0 ; i<nin ; i++)
         *iptr++ = ParseDouble ( &lptr ) ;

      did_any = 1 ;              // Flag that at least one found
      trial ( inputs ) ;         // Compute network's outputs

      maxact = -1.e30 ;          // Will keep highest activity here
      best = 0 ;                 // Insurance only (good habit)
      for (i=0 ; i<nout ; i++) { // Find winning output
         if (out[i] > maxact) {
            maxact = out[i] ;
            best = i ;
            }
         }

      if (maxact >= thresh)   // If winner has enough activation
         ++confusion[best] ;  // count it in confusion
      else                    // If too little, throw it
         ++confusion[nout] ;  // in the reject category

      while ((! feof ( fp ))  &&  (line[strlen(line)-1] != '\n'))
         fgets ( line , maxlin , fp ) ; // Line length may exceed maxlin

      if (feof ( fp ))
         break ;
      }  /* Endless loop until a file runs out */

   fclose ( fp ) ;
   MEMTEXT ( "CONFUSE:line, inputs" ) ;
   FREE ( line ) ;
   FREE ( inputs ) ;
}
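
The reject handling above relies on the confusion array holding one extra slot past the real classes: confusion[0..nout-1] count cases assigned to each class, and confusion[nout] counts cases rejected for weak activation. Below is a minimal sketch of clearing the counts under that convention; it is inferred from the code above and is illustrative only, not the class's own reset routine.

static void clear_confusion ( int *confusion , int nout )
{
   for (int i=0 ; i<=nout ; i++)   // Note <= : the reject slot is included
      confusion[i] = 0 ;
}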
Example #8
void Network::test_from_file (
   char *dataname ,  // Input file name
   char *outname,     // Output file name
   int   netmod
   )
{
   int i, maxlin, did_any, win1, win2, win3 ;
   double *inputs, *iptr, maxi1, maxi2, maxi3 ;
   char msg[81], *line, *lptr ;
   FILE *fp_in, *fp_out ;


/*
   Open the file which contains the data to be classified
*/
   if ((fp_in = fopen ( dataname , "rt" )) == NULL) {
      strcpy ( msg , "Cannot open input data file " ) ;
      strcat ( msg , dataname ) ;
      error_message ( msg ) ;
      return ;
      }

/*
   Open the file to which outputs will be written.
   If it already exists, write a newline at its end.
*/

   fp_out = fopen ( outname , "rt" ) ;
   if (fp_out != NULL) {
      did_any = 1 ;
      fclose ( fp_out ) ;
      }
   else
      did_any = 0 ;

   if ((fp_out = fopen ( outname , "at" )) == NULL) {
      strcpy ( msg , "Cannot open output file " ) ;
      strcat ( msg , outname ) ;
      error_message ( msg ) ;
      fclose ( fp_in ) ;
      return ;
      }

   if (did_any)
   {
      fprintf ( fp_out , "\n" ) ;
   }

/*
   Allocate for the file lines as read. Conservatively guess length.
   Also allocate for network input vector.
*/

   maxlin = nin * 20 + 100 ;
   if (maxlin < 1024)
      maxlin = 1024 ;

   MEMTEXT ( "EXECUTE:line, inputs" ) ;
   line = (char *) MALLOC ( maxlin ) ;

   inputs = (double *) MALLOC ( nin * sizeof(double) ) ;

   if ((line == NULL)  ||  (inputs == NULL)) {
      memory_message ( "to execute" ) ;
      fclose ( fp_in ) ;
      fclose ( fp_out ) ;
      if (line != NULL)
	 FREE ( line ) ;
      if (inputs != NULL)
	 FREE ( inputs ) ;
      return ;
      }

/*
   Read and process the file.
*/

   did_any = 0 ;  /* If file runs out on first try, ERROR! */
   for (;;) {  // Endless loop reads until file exhausted

      if ((fgets ( line , maxlin , fp_in ) == NULL) || (strlen ( line ) < 2)) {
	 if ((! did_any)  ||  ferror ( fp_in )) {
	    strcpy ( msg , "Problem reading file " ) ;
	    strcat ( msg , dataname ) ;
	    error_message ( msg ) ;
	    }
	 break ;
	 }

      lptr = line ;           // Parse the data from this line
      iptr = inputs ;         // This will be the network inputs
      for (i=0 ; i<nin ; i++)
	 *iptr++ = ParseDouble ( &lptr ) ;

      if (did_any)            // Start each new case on new line
	 fprintf ( fp_out , "\n" ) ;

      did_any = 1 ;           // Flag that at least one found
      trial ( inputs ) ;      // Compute network's outputs
      maxi1=maxi2=maxi3=0.0;
      win1=win2=win3=0;
      // Save the maximum activation and the winner of the output vector
      if (netmod==NETMOD_KOH)
      {
	// First maximum
	for (i=0 ; i<nout ; i++)
	{
	   if (out[i]>maxi1)
	   {
	     maxi1=out[i];
	     win1=i;
	   }
	}
	// 2nd Maximum
	for (i=0 ; i<nout ; i++)
	{
	   if ( (out[i]>maxi2) && (out[i]<maxi1) )
	   {
	     maxi2=out[i];
	     win2=i;
	   }
	}
	// 3rd Maximum
	for (i=0 ; i<nout ; i++)
	{
	   if ( (out[i]>maxi3) && (out[i]<maxi2) )
	   {
	     maxi3=out[i];
	     win3=i;
	   }
	}

      }

      if (netmod==NETMOD_KOH)
      {
	fprintf (fp_out,"%d %3.2lf\n",win1,maxi1*100.0);
	fprintf (fp_out,"%d %3.2lf\n",win2,maxi2*100.0);
	fprintf (fp_out,"%d %3.2lf",win3,maxi3*100.0);
      }
      else
	for (i=0 ; i<nout ; i++)
	{
	   fprintf ( fp_out , "%.4lf ",out[i]);
	}

      while ((! feof ( fp_in ))  &&  (line[strlen(line)-1] != '\n'))
	 fgets ( line , maxlin , fp_in ) ; // Line length may exceed maxlin

      if (feof ( fp_in ))
	 break ;
      }  /* Endless loop until a file runs out */

   MEMTEXT ( "EXECUTE:line, inputs" ) ;
   fclose ( fp_in ) ;
   fclose ( fp_out ) ;
   FREE ( line ) ;
   FREE ( inputs ) ;
}
Example #9
void LayerNet::gen_init (
   TrainingSet *tptr ,        // Training set to use
   struct LearnParams *lptr   // User's general learning parameters
   )
{
   int i, istart, individual, best_individual, generation, n_cross ;
   int first_child, parent1, parent2, improved, crosspt, nchoices, *choices ;
   int initpop, popsize, gens, climb, nvars, chromsize, split, ind ;
   float pcross, pmutate, error, besterror, *errors, *fitness, worst ;
   float fquit, favor_best, fitfac, maxerr, minerr, avgerr, overinit ;
   SingularValueDecomp *sptr ;
   struct GenInitParams *gptr ;  // User's genetic initialization parameters
   char *pool1, *pool2, *oldpop, *newpop, *popptr, *temppop, *best ;
   char msg[80] ;

   gptr = lptr->gp ;

   popsize = gptr->pool ;
   gens = gptr->gens ;
   climb = gptr->climb ;
   overinit = gptr->overinit ;
   pcross = gptr->pcross ;
   pmutate = gptr->pmutate ;

   fquit = lptr->quit_err ;

   favor_best = 3.1 ;
   fitfac = -20.0 ;


/*
--------------------------------------------------------------------------------

   Do all scratch memory allocation.

--------------------------------------------------------------------------------
*/

/*
   Allocate the singular value decomposition object for REGRESS.
*/

   if (nhid2 == 0)         // One hidden layer
      nvars = nhid1 + 1 ;
   else                    // Two hidden layers
      nvars = nhid2 + 1 ;

   MEMTEXT ( "GEN_INIT: new SingularValueDecomp" ) ;
   sptr = new SingularValueDecomp ( tptr->ntrain , nvars , 1 ) ;

   if ((sptr == NULL)  || ! sptr->ok) {
      memory_message("for genetic initialization. Try ANNEAL NOREGRESS.");
      neterr = 1.0 ;    // Flag failure to LayerNet::learn which called us
      if (sptr != NULL)
         delete sptr ;
      return ;
      }

   chromsize = nhid1 * (nin+1) ;        // Length of an individual's chromosome
   if (nhid2)                           // is the number of hidden weights
      chromsize += nhid2 * (nhid1+1) ;

   errors = fitness = NULL ;
   choices = NULL ;
   best = NULL ;
   pool1 = pool2 = NULL ;
   MEMTEXT ( "GEN_INIT: errors, fitness, choices, best, pool1,pool2");
   if (((errors = (float*) MALLOC ( popsize * sizeof(float))) == NULL)
    || ((fitness = (float*) MALLOC ( popsize * sizeof(float))) == NULL)
    || ((best = (char*) MALLOC( chromsize )) == NULL)
    || ((choices = (int*) MALLOC ( popsize * sizeof(int))) == NULL)
    || ((pool1 = (char*) MALLOC( popsize * chromsize )) == NULL)
    || ((pool2 = (char*) MALLOC( popsize * chromsize )) == NULL)) {
      if (errors != NULL)
         FREE ( errors ) ;
      if (fitness != NULL)
         FREE ( fitness ) ;
      if (best != NULL)
         FREE ( best ) ;
      if (choices != NULL)
         FREE ( choices ) ;
      if (pool1 != NULL)
         FREE ( pool1 ) ;
      if (pool2 != NULL)
         FREE ( pool2 ) ;
      delete sptr ;
      memory_message("for genetic initialization.  Try ANNEAL NOREGRESS." ) ;
      neterr = 1.0 ;  // Flag failure to LayerNet::learn which called us
      return ;
      }

/*
   Generate initial population pool.

   We also preserve the best weights across all generations,
   as this is what we will ultimately return to the user.
   Its mean square error is besterror.
*/

   besterror = 1.e30 ;     // For saving best (across all individuals and gens)
   maxerr = avgerr = 0.0 ; // For progress display only
   best_individual = 0 ;   // Safety only
   initpop = popsize * overinit ; // Overinitialization of initial population
   progress_message ( "\nGenerating initial population" ) ;

   for (ind=0 ; ind<initpop ; ind++) { // Try overinitialization times

      if (ind<popsize)                 // If still in pop size limit
         individual = ind ;            // just use next avail space
      else {                           // Else we search entire pop
         worst = -1. ;                 // for the worst member
         for (i=0 ; i<popsize ; i++) { // which we will then replace
            if (errors[i] > worst) {
               worst = errors[i] ;
               individual = i ;
               }
            }
         avgerr -= worst ;             // Exclude discards from average
         }

      popptr = pool1 + individual * chromsize ; // Build init pop in pool1
      rand_ind ( popptr , chromsize ) ;         // Randomly generate individual
      decode ( popptr , nin , nhid1 , nhid2 ,   // Convert genotype (chromosome)
               hid1_coefs , hid2_coefs );       // to phenotype (weights)
      error = regress ( tptr , sptr ) ;         // Evaluate network error
      errors[individual] = error ;              // and keep all errors

      if (error < besterror) {                  // Keep track of best
         besterror = error ;                    // as it is returned to user
         best_individual = individual ;         // This is its index in pool1
         }

      if (error > maxerr)                       // Max and average error are
         maxerr = error ;                       // for progress display only
      avgerr += error ;

      if (error <= fquit)
         break ;

      progress_message ( "." ) ;
      }

   sprintf (msg , "\nInitial pop:    Min err=%7.4lf   Max=%7.4lf   Avg=%7.4lf",
	    100. * besterror, 100. * maxerr, 100.0 * avgerr / (float) popsize);
   progress_message ( msg ) ;


/*
   The initial population has been built in pool1.
   Copy its best member to 'best' in case it never gets beat (unlikely
   but possible!).
   Also, we will need best if the climb option is true.
*/

   popptr = pool1 + best_individual * chromsize ; // Point to best
   memcpy ( best , popptr , chromsize ) ;         // and save it

/*
   This is the main generation loop.  There are two areas for population pool
   storage: pool1 and pool2.  At any given time, oldpop will be set to one of
   them, and newpop to the other.  This avoids a lot of copying.
*/

   oldpop = pool1 ;       // This is the initial population
   newpop = pool2 ;       // The next generation is created here

   for (generation=0 ; generation<gens ; generation++) {

      if (error <= fquit) // We may have satisfied this in init pop
         break ;          // So we test at start of generation loop

      error_to_fitness ( popsize , favor_best , fitfac , errors , fitness ) ;

      fitness_to_choices ( popsize , fitness , choices ) ;

      nchoices = popsize ;         // Will count down as choices array emptied
      n_cross = pcross * popsize ; // Number crossing over
      first_child = 1 ;            // Generating first of parent's 2 children?
      improved = 0 ;               // Flags if we beat best

      if (climb) {                 // If we are to hill climb
         memcpy ( newpop , best , chromsize ) ; // start with best
         errors[0] = besterror ;   // Record its error
         istart = 1 ;              // and start children past it
         }
      else
         istart = 0 ;

/*
   Generate the children
*/

      maxerr = avgerr = 0.0 ;   // For progress display only
      minerr = 1.0 ;            // Ditto

      for (individual=istart ; individual<popsize ; individual++) {

         popptr = newpop + individual * chromsize ; // Will put this child here

         if (first_child)  // If this is the first of 2 children, pick parents
            pick_parents ( &nchoices , choices , &parent1 , &parent2 ) ;

         if (n_cross-- > 0)    // Do crossovers first
            reproduce ( oldpop + parent1 * chromsize , oldpop + parent2 * chromsize ,
                        first_child , chromsize , popptr , &crosspt , &split ) ;
         else if (first_child) // No more crossovers, so just copy parent
            memcpy ( popptr , oldpop + parent1 * chromsize , chromsize ) ;
         else
            memcpy ( popptr , oldpop + parent2 * chromsize , chromsize );

         if (pmutate > 0.0)
            mutate ( popptr , chromsize , pmutate ) ;

         decode ( popptr , nin , nhid1 , nhid2 , hid1_coefs , hid2_coefs ) ;
         error = regress ( tptr , sptr ) ; // Evaluate child's error
         errors[individual] = error ;      // and keep each

         if (error < besterror) {          // Keep track of best
            besterror = error ;            // It will be returned to user
            best_individual = individual ; // This is its index in newpop
            improved = 1 ;                 // Flag so we copy it later
            }

         if (error > maxerr)               // Min, max and average error
            maxerr = error ;               // for progress display only
         if (error < minerr)
            minerr = error ;
         avgerr += error ;

         if (error <= fquit)
            break ;

         first_child = ! first_child ;
         } // For all genes in population

/*
   We finished generating all children.  If we improved (one of these
   children beat the best so far) then copy that child to the best.
   Swap oldpop and newpop for the next generation.
*/

      if (improved) {
         popptr = newpop + best_individual * chromsize ; // Point to best
         memcpy ( best , popptr , chromsize ) ;          // and save it
         }

      temppop = oldpop ;   // Switch old and new pops for next generation
      oldpop = newpop ;
      newpop = temppop ;

      sprintf(msg, "\nGeneration %3d: Min err=%7.4lf   Max=%7.4lf   Avg=%7.4lf",
              generation+1, 100. * minerr, 100. * maxerr,
	      100.0 * avgerr / (float) popsize ) ;
      progress_message ( msg ) ;
      }

/*
   We are all done.
*/

   decode ( best , nin , nhid1 , nhid2 , hid1_coefs , hid2_coefs ) ;
   besterror = regress ( tptr , sptr ) ;              // Evaluate network error

   MEMTEXT ( "GEN_INIT: errors, fitness, choices, best, pool1,pool2");
   FREE ( errors ) ;
   FREE ( fitness ) ;
   FREE ( choices ) ;
   FREE ( best ) ;
   FREE ( pool1 ) ;
   FREE ( pool2 ) ;
   MEMTEXT ( "GEN_INIT: delete sptr" ) ;
   delete sptr ;
}
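
The pool1/pool2 arrangement above is a double buffer: each generation is written into whichever pool is not currently holding the parents, and the two pointers are then swapped, so chromosomes are never copied between pools. A stripped-down sketch of the same idea follows, with illustrative names and a trivial stand-in for parent selection, crossover, and mutation.

static void evolve_pools ( char *pool1 , char *pool2 , int popsize ,
                           int chromsize , int gens )
{
   char *oldpop = pool1 ;                     // Parents live here
   char *newpop = pool2 ;                     // Children are written here

   for (int gen=0 ; gen<gens ; gen++) {
      for (int ind=0 ; ind<popsize ; ind++) {
         char *child = newpop + ind * chromsize ;
         char *parent = oldpop + ind * chromsize ;
         memcpy ( child , parent , chromsize ) ;  // Stand-in for reproduce/mutate
         }
      char *temp = oldpop ;                   // Swap roles for the next generation;
      oldpop = newpop ;                       // nothing is copied, only pointers move
      newpop = temp ;
      }
}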
Example #10
void LayerNet::anneal (
   TrainingSet *tptr ,        // Training set to use
   struct LearnParams *lptr , // User's general learning parameters
   LayerNet *bestnet ,        // Work area used to keep best network
   int init                   // Use zero suffix (initialization) anneal parms?
   )
{
   int ntemps, niters, setback, reg, nvars, key, user_quit ;
   int i, iter, improved, ever_improved, itemp ;
   long seed, bestseed ;
   char msg[80] ;
   double tempmult, temp, fval, bestfval, starttemp, stoptemp, fquit ;
   SingularValueDecomp *sptr ;
   struct AnnealParams *aptr ; // User's annealing parameters
                             
   aptr = lptr->ap ;

/*
   The parameter 'init' is nonzero if we are initializing
   weights for learning.  If zero we are attempting to break
   out of a local minimum.  The main effect  of this parameter
   is whether or not we use the zero suffix variables in the
   anneal parameters.
   A second effect is that regression is used only for
   initialization, not for escape.
*/

   if (init) {
      ntemps = aptr->temps0 ;
      niters = aptr->iters0 ;
      setback = aptr->setback0 ;
      starttemp = aptr->start0 ;
      stoptemp = aptr->stop0 ;
      }
   else {
      ntemps = aptr->temps ;
      niters = aptr->iters ;
      setback = aptr->setback ;
      starttemp = aptr->start ;
      stoptemp = aptr->stop ;
      }

/*
   Initialize other local parameters.  Note that there is no sense using
   regression if there are no hidden layers.  Also, regression is almost
   always counterproductive for local minimum escape.
*/

   fquit = lptr->quit_err ;
   reg = init  &&  nhid1  &&  (lptr->init != 1) ;

/*
   Allocate the singular value decomposition object for REGRESS.
   Also allocate a work area for REGRESS to preserve matrix.
*/

   if (reg) {
      if (nhid1 == 0)         // No hidden layer
         nvars = nin + 1 ;
      else if (nhid2 == 0)    // One hidden layer
         nvars = nhid1 + 1 ;
      else                    // Two hidden layers
         nvars = nhid2 + 1 ;

      MEMTEXT ( "ANNEAL: new SingularValueDecomp" ) ;
      sptr = new SingularValueDecomp ( tptr->ntrain , nvars , 1 ) ;

      if ((sptr == NULL)  || ! sptr->ok) {
         memory_message (
            "for annealing with regression. Try ANNEAL NOREGRESS.");
         if (sptr != NULL)
            delete sptr ;
         neterr = 1.0 ; // Flag failure to LayerNet::learn which called us
         return ;
         }
      }

/*
   For every temperature, the center around which we will perturb is the
   best point so far.  This is kept in 'bestnet', so initialize it to the
   user's starting estimate.   Also, initialize 'bestfval', the best
   function value so far, to be the function value at that starting point.
*/

   copy_weights ( bestnet , this ) ; // Current weights are best so far
   if (init)
      bestfval = 1.e30 ;  // Force it to accept SOMETHING
   else 
      bestfval = trial_error ( tptr ) ;

/*
   This is the temperature reduction loop and the iteration within
   temperature loop.  We use a slick trick to keep track of the
   best point at a given temperature.  We certainly don't want to
   replace the best every time an improvement is had, as then we
   would be moving our center about, compromising the global nature
   of the algorithm.  We could, of course, have a second work area
   in which we save the 'best so far for this temperature' point.
   But if there are a lot of variables, the usual case, this wastes
   memory.  What we do is to save the seed of the random number
   generator which created the improvement.  Then later, when we
   need to retrieve the best, simply set the random seed and
   regenerate it.  This technique also saves a lot of copying time
   if many improvements are made for a single temperature.
*/

   temp = starttemp ;
   tempmult = exp( log( stoptemp / starttemp ) / (ntemps-1)) ;
   ever_improved = 0 ;                       // Flags if improved at all
   user_quit = 0 ;                           // Flags user pressed ESCape

   for (itemp=0 ; itemp<ntemps ; itemp++) {  // Temp reduction loop

      improved = 0 ;                         // Flags if this temp improved

      if (init) {
         sprintf ( msg , "\nANNEAL temp=%.2lf ", temp ) ;
         progress_message ( msg ) ;
         }

      for (iter=0 ; iter<niters ; iter++) {  // Iters per temp loop

         seed = longrand () ;                // Get a random seed
         slongrand ( seed ) ;                // Brute force set it
         perturb (bestnet, this, temp, reg) ;// Randomly perturb about best

         if (reg)                            // If using regression, estimate
            fval = regress ( tptr , sptr ) ; // out weights now
         else                                // Otherwise just evaluate
            fval = trial_error ( tptr ) ;

         if (fval < bestfval) {              // If this iteration improved
            bestfval = fval ;                // then update the best so far
            bestseed = seed ;                // and save seed to recreate it
            ever_improved = improved = 1 ;   // Flag that we improved

            if (bestfval <= fquit)           // If we reached the user's
               break ;                       // limit, we can quit

            iter -= setback ;                // It often pays to keep going
            if (iter < 0)                    // at this temperature if we
               iter = 0 ;                    // are still improving
            }
         }                                   // Loop: for all iters at a temp

      if (improved) {                        // If this temp saw improvement
         slongrand ( bestseed ) ;            // set seed to what caused it
         perturb (bestnet, this, temp, reg) ;// and recreate that point
         copy_weights ( bestnet , this ) ;   // which will become next center
         slongrand ( bestseed / 2 + 999 ) ;  // Jog seed away from best

         if (init) {
            sprintf ( msg , " err=%.3lf%% ", 100.0 * bestfval ) ;
            progress_message ( msg ) ;
            }
         }

      if (bestfval <= fquit)  // If we reached the user's
         break ;              // limit, we can quit

/***********************************************************************
      if (kbhit()) {          // Was a key pressed?
         key = getch () ;     // Read it if so
         while (kbhit())      // Flush key buffer in case function key
            getch () ;        // or key was held down
         if (key == 27) {     // ESCape
            user_quit = 1 ;   // Flags user that ESCape was pressed
            break ;
            }
	    }
***********************************************************************/


      if (user_quit)
         break ;

      temp *= tempmult ;      // Reduce temp for next pass
      }                       // through this temperature loop


/*
   The trials left this weight set and neterr in random condition.
   Make them equal to the best, which will be the original
   if we never improved.

   Also, if we improved and are using regression, recall that bestnet
   only contains the best hidden weights, as we did not bother to run
   regress when we updated bestnet.  Do that now before returning.
*/

   copy_weights ( this , bestnet ) ; // Return best weights in this net
   neterr = bestfval ;               // Trials destroyed weights, err

   if (ever_improved  &&  reg)
      neterr = regress ( tptr , sptr ) ; // regressed output weights

   if (reg) {
      MEMTEXT ( "ANNEAL: delete SingularValueDecomp" ) ;
      delete sptr ;
      }
}
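
The seed trick described in the long comment above can be isolated from the network details. The sketch below is illustrative only: perturb_vec and evaluate stand in for perturb and trial_error/regress, and the only assumption is that the perturbation is completely determined by the generator state set with slongrand.

static double keep_best_by_seed ( double *center , double *trial , double temp ,
                                  int niters ,
                                  void (*perturb_vec)(double *, double *, double) ,
                                  double (*evaluate)(double *) )
{
   long best_seed = 0 ;
   double best_val = 1.e30 ;

   for (int iter=0 ; iter<niters ; iter++) {
      long seed = longrand () ;                // Draw a seed
      slongrand ( seed ) ;                     // Force the generator to that state
      perturb_vec ( center , trial , temp ) ;  // Deterministic given that state
      double val = evaluate ( trial ) ;
      if (val < best_val) {                    // Remember the seed, not the vector
         best_val = val ;
         best_seed = seed ;
         }
      }

   slongrand ( best_seed ) ;                   // Replay the winning draw
   perturb_vec ( center , trial , temp ) ;     // 'trial' again holds the best point
   return best_val ;
}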
Example #11
void TrainingSet::train (
   char *filename ,   // Full name of file supplying training data
   int outclass       // Output class number if CLASSIF output mode
   )
{
   int i, did_any, maxlin ;
   unsigned size, nbuf, tset_bytes, maxtrain ;
   double *tptr, *temp ;
   char msg[81], *line, *lptr ;
   FILE *fp ;

/*
   Compute the size of each training sample.
   CLASSIF output uses a double at the end of each input sample to indicate
   the class membership (0=reject).
   We will allocate memory in chunks to save calls to realloc.
   Each chunk will hold nbuf cases.
   The number of training sets is limited by the maximum unsigned int bytes
   which we can allocate.
*/

   if (outmod == OUTMOD_CLASSIFY)
      size = nin + 1 ;
   else if (outmod == OUTMOD_AUTO)
      size = nin ;
   else if (outmod == OUTMOD_GENERAL)
      size = nin + nout ;

   tset_bytes = size * sizeof(double) ;
   nbuf = TRAIN_BUF_SIZE / tset_bytes ;
   if (! nbuf) {
      error_message ( "Cases too large.  Increase TRAIN_BUF_SIZE." ) ;
      return ;
      }

   maxtrain = ((unsigned) ~0) / (nbuf * tset_bytes) * nbuf ;

   if (ntrain >= maxtrain) {
      error_message ( "Too many training sets." ) ;
      return ;
      }

   if ((fp = fopen ( filename , "rt" )) == NULL) {
      sprintf ( msg , "Cannot open %s", filename ) ;
      error_message ( msg ) ;
      return ;
      }

/*
   Conservatively estimate the length of each file line, then allocate it.
*/

   if (outmod == OUTMOD_GENERAL)
      maxlin = (nin + nout) * 20 + 100 ;
   else 
      maxlin = nin * 20 + 100 ;
   if (maxlin < 1024)
      maxlin = 1024 ;

   MEMTEXT ( "TRAIN:line" ) ;
   if ((line = (char *) MALLOC ( maxlin )) == NULL) {
      memory_message ( "to read training set" ) ;
      fclose ( fp ) ;
      return ;
      }

/*
   If the training set is empty so far, do the initial allocation.
   We allocate for nbuf samples at a time to save calls to REALLOC.
*/

   if (data == NULL) {
      MEMTEXT ( "TRAIN: first block" ) ;
      data = (double *) MALLOC ( nbuf * tset_bytes ) ;
      if (data == NULL) {
         memory_message ( "to read training set" ) ;
         goto FINISH ;
         }
      bufcnt = nbuf ;
      }

/*
   Read the file.
*/

   did_any = 0 ;  /* If file runs out on first try, ERROR! */

   for (;;) {  // Endless loop reads until file exhausted

      if ((fgets ( line , maxlin , fp ) == NULL) || (strlen ( line ) < 2)) {
         if ((! did_any)  ||  ferror ( fp )) {  // Recall fgets returns newline
            strcpy ( msg , "Problem reading file " ) ;
            strcat ( msg , filename ) ;
            error_message ( msg ) ;
            goto FINISH ;
            }
         else
            break ;   // Normal end of file
         }

      if (! bufcnt--) {  // Allocate a new memory block if needed

         if ((ntrain + nbuf) > maxtrain) {
            error_message ( "Too many training sets." ) ;
            bufcnt = 0 ;
            goto FINISH ;
            }

         MEMTEXT ( "TRAIN: new block" ) ;
         temp = (double *) REALLOC ( data , (ntrain + nbuf) * tset_bytes ) ;
         if (temp == NULL) {
            memory_message ( "to read training set" ) ;
            bufcnt = 0 ;
            goto FINISH ;
            }

         data = temp ;
         bufcnt = nbuf - 1 ;
         }

      lptr = line ;                          // Parse the data from this line
      tptr = data + ntrain * size ;          // This sample will go here
      for (i=0 ; i<nin ; i++)                // Start with inputs
         *tptr++ = ParseDouble ( &lptr ) ;
      if (outmod == OUTMOD_GENERAL) {        // Also do outputs if GENERAL
         for (i=0 ; i<nout ; i++)
            *tptr++ = ParseDouble ( &lptr ) ;
         }

      if (outmod == OUTMOD_CLASSIFY)         // Save class if CLASSIF output
         *tptr++ = (double) outclass + .1 ; // .1 lets us safely truncate

      did_any = 1 ;   // Flag that we got some data, so EOF legitimate
      ++ntrain ;

      while ((! feof ( fp ))  &&  (line[strlen(line)-1] != '\n'))
         fgets ( line , maxlin , fp ) ; // Line length may exceed what is needed

      if (feof ( fp ))
         break ;

      }  /* Endless loop until a file runs out */

/*
   Save a little space by shrinking to exactly what we needed
*/

   MEMTEXT ( "TRAIN:final" ) ;
   temp = (double *) REALLOC ( data , ntrain * tset_bytes ) ;
   if (temp != NULL)   // If the shrink fails, keep the larger block rather than lose data
      data = temp ;
   bufcnt = 0 ;

FINISH:
   fclose ( fp ) ;
   MEMTEXT ( "TRAIN:line" ) ;
   FREE ( line ) ;
}
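
The chunked growth described at the top of TrainingSet::train generalizes to any append-only buffer: grow by nbuf records at a time so a long file costs only a handful of REALLOC calls. A sketch of the pattern follows, with illustrative names; the original does this inline rather than through a helper.

// Append one record of 'size' doubles, growing the buffer nbuf records at a time.
// Returns the (possibly moved) buffer, or NULL after freeing it on failure.
static double *append_case ( double *data , unsigned *ntrain , unsigned *bufcnt ,
                             unsigned nbuf , unsigned size , const double *one_case )
{
   if (*bufcnt == 0) {          // Current chunk exhausted, so grab another
      double *temp ;
      if (data == NULL)         // First chunk
         temp = (double *) MALLOC ( nbuf * size * sizeof(double) ) ;
      else
         temp = (double *) REALLOC ( data , (*ntrain + nbuf) * size * sizeof(double) ) ;
      if (temp == NULL) {
         if (data != NULL)
            FREE ( data ) ;
         return NULL ;
         }
      data = temp ;
      *bufcnt = nbuf ;
      }
   memcpy ( data + *ntrain * size , one_case , size * sizeof(double) ) ;
   ++*ntrain ;
   --*bufcnt ;
   return data ;
}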
Example #12
//*******************************************************************
// WinMain - Neural main
//
// parameters:
//             hInstance     - Handle to this instance of the
//                             application.
//             hPrevInstance - Handle to the previous instance of this
//                             application. This will be 0 if this is
//                             the first instance.
//             lpszCmdLine   - A long pointer to the command line that
//                             started this application.
//             cmdShow       - Indicates how the window is to be shown
//                             initially, e.g. SW_SHOWNORMAL, SW_HIDE,
//                             SW_MINIMIZE.
//
// returns:
//             wParam from last message.
//
//*******************************************************************
int PASCAL WinMain(HINSTANCE hInstance, HINSTANCE hPrevInstance,
						 LPSTR lpszCmdLine, int cmdShow)
{


/*
	Declarations of local variables
*/

	int control_file_number = -1 ;           // Stack pointer for control files
	FILE *control_files[MAX_CONTROL_FILES] ; // This is the stack

	char *control_line ;    // User's commands here
	char *command, *rest ;  // Pointers to its command and parameter parts
	int n_command, n_rest ; // Lengths of those parts

	int net_model = -1 ;     // Network model (see NETMOD_? in CONST.H)
	int out_model = -1 ;     // Output model (see OUTMOD_? in CONST.H)
	int n_inputs = -1 ;      // Number of input neurons
	int n_outputs = -1 ;     // Number of output neurons
	int n_hidden1 = -1 ;     // Number of hidden layer one neurons
	int n_hidden2 = -1 ;     // Ditto layer 2 (0 if just one hidden layer)


	TrainingSet *tset = NULL ;            // Training set here
	Network *network = NULL ;             // Network here
	struct LearnParams learn_params ;     // General learning parameters
	struct AnnealParams anneal_params ;   // Simulated annealing parameters
	struct GenInitParams geninit_params ; // Genetic initialization parameters
	struct KohParams koh_params ;         // Kohonen parameters

	int classif_output = -1 ;  // Current class (0=reject) for classif training
	char out_file[80] = "" ;   // File for EXECUTE output
	float threshold = 0.0 ;    // CLASSIFY confusion reject cutoff (set by CONFUSION THRESHOLD)
	char resp_file[80]="";     // file for initializing output neuron's name
	char train_file[80]="";
/*
	Miscellaneous variables
*/

	int i, n, m ;
	float p ;
	char *msg ;
	FILE *fp ;
	char fname[256] = "" ;  // Optional control file named on the command line

#if VERSION_16_BIT
	if (sizeof(int) > 2) {
		printf ( "\nRecompile with VERSION_16_BIT set to 0 in CONST.H" ) ;
		exit ( 1 ) ;
		}
#else
	if (sizeof(int) < 4) {
		printf ( "\nRecompile with VERSION_16_BIT set to 1 in CONST.H" ) ;
		exit ( 1 ) ;
		}
#endif


printf ( "\nNEURAL SYSTEM - Program to train and test neural networks" ) ;

if (argc>1)
{
  strcpy(fname,argv[1]);
}


/*
   Process command line parameters
*/

   mem_name[0] = 0 ;  // Default is no memory allocation file
 /*
   if (strlen ( mem_name )) {
      strcat ( mem_name , ":mem.log" ) ;
      fp = fopen ( mem_name , "wt" ) ;
      if (fp == NULL) {
	 printf ( "\nCannot open debugging file %s", mem_name ) ;
	 exit ( 1 ) ;
	 }
      fclose ( fp ) ;
      mem_log = 1 ;
      }
   else
      mem_log = 0 ;
   */
   mem_log  = 0 ;
   mem_used = 0 ;
/*
   Initialize defaults
*/

   learn_params.init = -1 ;
   learn_params.quit_err = 0.0 ;
   learn_params.retries = 32767 ;

   anneal_params.temps0 = 3 ;
   anneal_params.temps = 4 ;
   anneal_params.iters0 = 50 ;
   anneal_params.iters = 20 ;
   anneal_params.setback0 = 50 ;
   anneal_params.setback = 20 ;
   anneal_params.start0 = 3.0 ;
   anneal_params.start = 4.0 ;
   anneal_params.stop0 = 1.0 ;
   anneal_params.stop = 0.02 ;

   geninit_params.pool = 50 ;
   geninit_params.gens = 3 ;
   geninit_params.climb = 0 ;
   geninit_params.overinit = 1.5 ;
   geninit_params.pcross = 0.8 ;
   geninit_params.pmutate = 0.0001 ;

   koh_params.normalization = 0 ;  // 0=multiplicative, 1=Z
   koh_params.learn_method = 1 ;   // 0=additive, 1=subtractive
   koh_params.rate = 0.4 ;         // learning rate
   koh_params.reduction = 0.99 ;   // learning rate reduction

   learn_params.ap = &anneal_params ;
   learn_params.gp = &geninit_params ;
   learn_params.kp = &koh_params ;


   act_func_init () ; // Initialize interpolation table for activation function

   MEMTEXT ( "NEURAL: control_line, msg" ) ;
   if (((control_line = (char *) MALLOC ( CONTROL_LINE_LENGTH+1 )) == NULL)
    || ((msg = (char *) MALLOC ( CONTROL_LINE_LENGTH+1 )) == NULL)) {
      printf ( "\nInsufficient memory" ) ;
      exit ( 1 ) ;
      }

/*
   Main loop processes all commands
*/

   for (;;) {
      if (fname[0]) {                 // Command-line control file: run it first
	 strcpy ( control_line , "CONTROL:" ) ;
	 strcat ( control_line , fname ) ;
	 fname[0] = 0 ;                // Do this only once
      }
      else
	 get_control_line ( control_line , &control_file_number, control_files ) ;

      split_control_line ( control_line , &command , &n_command ,
			   &rest , &n_rest ) ;

      if (! n_command) {
	 if (n_rest) {
	    sprintf ( msg , "No colon after command: %s", rest ) ;
	    error_message ( msg ) ;
	    }
	 continue ;
	 }

      sprintf ( msg , "%s : %s", command, rest ) ;
      normal_message ( msg ) ;

/*
   Act on the command
*/

      if (! strcmp ( command , "QUIT" ))
	 break ;

      if (! strcmp ( command , "CONTROL" )) {
	 stack_control_file (rest, &control_file_number, control_files) ;
	 continue ;
	 }

      if (! strcmp ( command , "NETWORK MODEL" )) {
	 // Multi layer network
	 if (! strcmp ( rest , "LAYER" ))
	    n = NETMOD_LAYER ;
	 // Kohonen network
	 else if (! strcmp ( rest , "KOHONEN" ))
	    n = NETMOD_KOH ;
	 // Hopfield network
	 else if (! strcmp ( rest , "HOPFIELD" ))
	    n = NETMOD_HOP ;
	 // Bidirectional associative memory network
	 else if (! strcmp ( rest , "BAM" ))
	    n = NETMOD_BAM ;

	 else {
	    sprintf ( msg , "Illegal NETWORK MODEL: %s", rest ) ;
	    error_message ( msg ) ;
	    continue ;
	    }
	 if (net_model == n)
	    continue ;
	 if (ok_to_clear_weights( &network )) {
	    net_model = n ;
	    learn_params.init = -1 ;
	    }
	 else
	    warning_message ( "Command aborted" ) ;
	 continue ;
	 }

      if (! strcmp ( command , "OUTPUT MODEL" )) {
	 if (! strcmp ( rest , "CLASSIFY" ))
	    n = OUTMOD_CLASSIFY ;
	 else if (! strcmp ( rest , "AUTO" ))
	    n = OUTMOD_AUTO ;
	 else if (! strcmp ( rest , "GENERAL" ))
	    n = OUTMOD_GENERAL ;
	 else {
	    sprintf ( msg , "Illegal OUTPUT MODEL: %s", rest ) ;
	    error_message ( msg ) ;
	    continue ;
	    }
	 if (out_model == n)
	    continue ;
	 if ((ok_to_clear_tset( &tset )) && (ok_to_clear_weights( &network)))
	    out_model = n ;
	 else
	    warning_message ( "Command aborted" ) ;
	 continue ;
	 }

      if (! strcmp ( command , "N INPUTS" )) {
	 m = sscanf ( rest , "%d" , &n ) ;
	 if ((m <= 0)  ||  (n <= 0)  ||  (n > MAX_INPUTS)) {
	    sprintf ( msg , "Illegal N INPUTS: %s", rest ) ;
	    error_message ( msg ) ;
	    continue ;
	    }
	 if (n_inputs == n)
	    continue ;
	 if ((ok_to_clear_tset( &tset)) && (ok_to_clear_weights(&network)))
	    n_inputs = n ;
	 else
	    warning_message ( "Command aborted" ) ;
	 continue ;
	 }

      if (! strcmp ( command , "N OUTPUTS" )) {
	 m = sscanf ( rest , "%d" , &n ) ;
	 if ((m <= 0)  ||  (n <= 0)  ||  (n > MAX_OUTPUTS)) {
	    sprintf ( msg , "Illegal N OUTPUTS: %s", rest ) ;
	    error_message ( msg ) ;
	    continue ;
	    }
	 if (n_outputs == n)
	    continue ;
	 if ((ok_to_clear_tset( &tset)) && (ok_to_clear_weights(&network)))
	    n_outputs = n ;
	 else
	    warning_message ( "Command aborted" ) ;
	 continue ;
	 }

      if (! strcmp ( command , "N HIDDEN1" )) {
	 m = sscanf ( rest , "%d" , &n ) ;
	 if ((m <= 0)  ||  (n < 0)  ||  (n > MAX_HIDDEN)) {
	    sprintf ( msg , "Illegal N HIDDEN1: %s", rest ) ;
	    error_message ( msg ) ;
	    continue ;
	    }
	 if (n_hidden1 == n)
	    continue ;
	 if (ok_to_clear_weights( &network ))
	    n_hidden1 = n ;
	 else
	    warning_message ( "Command aborted" ) ;
	 continue ;
	 }

      if (! strcmp ( command , "N HIDDEN2" )) {
	 m = sscanf ( rest , "%d" , &n ) ;
	 if ((m <= 0)  ||  (n < 0)  ||  (n > MAX_HIDDEN)) {
	    sprintf ( msg , "Illegal N HIDDEN2: %s", rest ) ;
	    error_message ( msg ) ;
	    continue ;
	    }
	 if (n  &&  ! n_hidden1) {
	    error_message ( "N HIDDEN2 must be 0 if N HIDDEN1 IS 0." ) ;
	    continue ;
	    }
	 if (n_hidden2 == n)
	    continue ;
	 if (ok_to_clear_weights( &network ))
	    n_hidden2 = n ;
	 else
	    warning_message ( "Command aborted" ) ;
	 continue ;
	 }

      if (! strcmp ( command , "TRAIN" )) {
	 if ((out_model == OUTMOD_AUTO)  &&  (n_outputs != n_inputs)) {
	    warning_message ( "Setting N OUTPUTS = N INPUTS" ) ;
	    n_outputs = n_inputs ;
	    }
	 if (out_model <= 0)
	    error_message ( "TRAIN used before OUTPUT MODEL set." ) ;
	 else if (n_inputs <= 0)
	    error_message ( "TRAIN used before N INPUTS set." ) ;
	 else if (n_outputs <= 0)
	    error_message ( "TRAIN used before N OUTPUTS set." ) ;
	 else if ((net_model == NETMOD_HOP) && (n_inputs != n_outputs))
	    error_message("HOPFIELD netowork requires INPUTS = OUTPUTS.");
	 else if ((net_model == NETMOD_BAM) && (out_model != OUTMOD_GENERAL))
	    error_message("BAM network requires AUTO output mode.");
	 else if ((net_model == NETMOD_HOP) && (out_model != OUTMOD_AUTO))
	    error_message("HOFIELD network requires AUTO output mode.");
	 else if ((net_model != NETMOD_KOH) && (out_model == OUTMOD_CLASSIFY)
		  &&  (classif_output < 0))
	    error_message( "CLASSIFY output mode but CLASSIFY OUTPUT not set.");
	 else if ((net_model == NETMOD_KOH)  &&  (out_model != OUTMOD_CLASSIFY))
	    error_message( "KOHONEN network requires CLASSIFY output mode.");
	 else {
	    if (tset == NULL) {
	       MEMTEXT ( "NEURAL: new tset" ) ;
	       tset = new TrainingSet ( out_model , n_inputs , n_outputs ) ;
	       }
	    tset->train ( rest , classif_output ) ;
	    strcpy(train_file,rest);
	 }
	 continue ;
	 }

      if (check_anneal ( command , rest , &anneal_params ))
	 continue ;

      if (check_genetic ( command , rest , &geninit_params ))
	 continue ;

      if (check_kohonen ( command , rest , &koh_params , &network ))
	 continue ;

      if (check_learn_params ( command , rest , &learn_params , net_model ))
	 continue ;

      if (! strcmp ( command , "LEARN" )) {
	 if ((tset == NULL)  ||  (tset->ntrain == 0)) {
	    error_message ( "Cannot LEARN; No training set exists." ) ;
	    continue ;
	    }
	 if ((net_model == NETMOD_KOH)  &&  (out_model != OUTMOD_CLASSIFY)) {
	    error_message( "KOHONEN network requires CLASSIFY output mode.");
	    continue ;
	    }
	 if (learn_params.init < 0) {
	    error_message( "Initialization method not set.");
	    continue ;
	    }
	 if (network == NULL)
	 {
	    if (net_model == NETMOD_LAYER)
	    {
	       if (n_hidden1 < 0)
	       {
		  error_message ( "LEARN used before N HIDDEN1 set." ) ;
		  continue ;
	       }
	       else if (n_hidden2 < 0)
	       {
		  error_message ( "LEARN used before N HIDDEN2 set." ) ;
		  continue ;
	       }
	       else
	       {
		  MEMTEXT ( "NEURAL: new LayerNet" ) ;
		  network = new LayerNet ( out_model , n_inputs , n_hidden1 ,
					   n_hidden2 , n_outputs , 1 , 1 ) ;
	       }
	    }
	    else if (net_model == NETMOD_KOH)
	    {
	       MEMTEXT ( "NEURAL: new KohNet" ) ;
	       network = new KohNet ( n_inputs , n_outputs ,
				      &koh_params , 1 , 1 ) ;
	    }
	    else if (net_model == NETMOD_HOP)
	    {

	       MEMTEXT ( "NEURAL: new HopNet" );
	       network = new HopNet (n_inputs,n_outputs, 1,1);
	    }

	    else if (net_model == NETMOD_BAM)
	    {
	       MEMTEXT ("NEURAL: new BamNet");
	       network = new LayerNet ( out_model , n_inputs , n_hidden1 ,
					n_hidden2 , n_outputs , 1 , 1 ) ;

	    }
	 }
	 if ((network == NULL)  ||  (! network->ok)) {  // Malloc failure?
	    memory_message ( "to create network." ) ;
	    if (network != NULL) {
	       delete network ;
	       network = NULL ;
	       }
	    continue ;
	    }
	 normal_message("Learning...\n");
	 network->learn ( tset , &learn_params ) ;
	 normal_message("End of Learning\n");
	 if (network->neterr > 0.999999) {  // Indicates massive failure
	    MEMTEXT ( "NEURAL: learn failure delete network" ) ;
	    delete network ;
	    network = NULL ;
	    }
	 else {
	    sprintf ( msg , "Final error = %.4lf%% of max possible",
		      100.0 * network->neterr ) ;
	    normal_message ( msg ) ;
	    }
	 continue ;
	 }

      if (! strcmp ( command , "SAVE WEIGHTS" )) {
	 if (network == NULL)
	    error_message ( "There are no learned weights to save." ) ;
	 else
	    wt_save ( network , net_model , 0 , rest ) ;
	 continue ;
	 }

      if (! strcmp ( command , "RESTORE WEIGHTS" )) {
	 if (network != NULL) {
	    MEMTEXT ( "NEURAL: delete network for restore" ) ;
	    delete network ;
	    network = NULL ;
	    }
	 network = wt_restore ( rest , &net_model ) ;
	 if (network == NULL)
	    continue ;
	 if (tset != NULL) {
	    if ((tset->nin != network->nin)
	     || (tset->nout != network->nout)
	     || (tset->outmod != network->outmod)) {
	       error_message ( "Network conflicts with existing training set.");
	       continue ;
	       }
	    }
	 out_model = network->outmod ;
	 n_inputs = network->nin ;
	 n_outputs = network->nout ;
	 if (net_model == NETMOD_LAYER) {
	    n_hidden1 = ((LayerNet*) network)->nhid1 ;
	    n_hidden2 = ((LayerNet*) network)->nhid2 ;
	    }
	 if (net_model == NETMOD_KOH)
	    koh_params.normalization = ((KohNet *) network)->normalization ;
	 learn_params.init = -1 ;
	 continue ;
	 }

      if (! strcmp ( command , "CLEAR TRAINING" )) {
	 if (tset != NULL) {
	    MEMTEXT ( "NEURAL: delete tset" ) ;
	    delete tset ;
	    tset = NULL ;
	    }
	 continue ;
	 }

      if (! strcmp ( command , "CLEAR WEIGHTS" )) {
	 if (network != NULL) {
	    MEMTEXT ( "NEURAL: delete network" ) ;
	    delete network ;
	    network = NULL ;
	    }
	 continue ;
	 }

      if (! strcmp ( command , "CLASSIFY OUTPUT" )) {
	 if (net_model == NETMOD_KOH) {
	    error_message ( "Cannot specify output for KOHONEN model." ) ;
	    continue ;
	    }
	 if (n_outputs < 0) {
	    error_message ( "CLASSIFY OUTPUT used before N OUTPUTS set." ) ;
	    continue ;
	    }
	 if (out_model != OUTMOD_CLASSIFY) {
	    error_message
		  ( "CLASSIFY OUTPUT only valid when OUTPUT MODEL:CLASSIFY" ) ;
	    continue ;
	    }
	 m = sscanf ( rest , "%d" , &n ) ;
	 if ((m <= 0)  ||  (n < 0)) {
	    sprintf ( msg , "Illegal CLASSIFY OUTPUT: %s", rest ) ;
	    error_message ( msg ) ;
	    }
	 else if (n > n_outputs) {
	    sprintf ( msg , "CLASSIFY OUTPUT (%d) exceeds N OUTPUTS (%d)",
		      n, n_outputs ) ;
	    error_message ( msg ) ;
	    }
	 else
	    classif_output = n ;
	 continue ;
	 }

      if (! strcmp ( command , "OUTPUT FILE" )) {
	 strcpy ( out_file , rest ) ;
	 continue ;
	 }

      if (! strcmp ( command , "EXECUTE" ))
      {
	 if (network == NULL)
	    error_message ( "There is no trained network" ) ;
	 else
	 {
	    network->execute_from_file ( rest , out_file) ;
	    continue ;
	 }
      }

      if (! strcmp ( command , "TEST NETWORK" ))
      {
	 if (network == NULL)
	    error_message ( "There is no trained network" ) ;
	 else
	 {
	    network->test_from_file ( rest ,out_file,net_model) ;
	    continue ;
	 }
      }

      if (! strcmp ( command , "CLASSIFY" )) {
	 if (network == NULL)
	    error_message ( "There is no trained network" ) ;
	 else if (out_model != OUTMOD_CLASSIFY)
	    error_message ( "CLASSIFY valid only in CLASSIFY output mode" ) ;
	 else
	    network->classify_from_file ( rest , threshold ) ;
	 continue ;
	 }

      if (! strcmp ( command , "RESET CONFUSION" )) {
         if (network == NULL)
            error_message ( "There is no trained network" ) ;
         else
            network->reset_confusion () ;
         continue ;
         }

      if (! strcmp ( command , "CONFUSION THRESHOLD" )) {
	 p = atof ( rest ) ;
	 if ((p < 0.0)  ||  (p > 100.0)) {
	    sprintf ( msg , "Illegal CONFUSION THRESHOLD: %s", rest ) ;
            error_message ( msg ) ;
            }
	 else
            threshold = p / 100.0 ;
         continue ;
         }

      if (! strcmp ( command , "SHOW CONFUSION" )) {
         if (network == NULL)
            error_message ( "There is no trained network" ) ;
         else if (out_model != OUTMOD_CLASSIFY)
	    error_message ( "CONFUSION valid only in CLASSIFY output mode" ) ;
         else
            network->show_confusion () ;
         continue ;
	 }

      if (! strcmp ( command , "SAVE CONFUSION" )) {
         if (network == NULL)
            error_message ( "There is no trained network" ) ;
         else if (out_model != OUTMOD_CLASSIFY)
            error_message ( "CONFUSION valid only in CLASSIFY output mode" ) ;
         else
            network->save_confusion ( rest ) ;
	 continue ;
         }

      sprintf ( msg , "Unknown command: %s", command ) ;
      error_message ( msg ) ;

      } // Endless command loop

   MEMTEXT ( "NEURAL: control_line, msg" ) ;
   FREE ( control_line ) ;
   FREE ( msg ) ;
   MEMCLOSE () ;
   exit ( 0 ) ;
}
Example #13
void LayerNet::anx_dd ( TrainingSet *tptr , struct LearnParams *lptr )
{
   int itry, n_escape, n_retry, bad_count, new_record, refined ;
   long seed ;
   double err, prev_err, best_err, start_of_loop_error, best_inner_error ;
   double initial_accuracy, final_accuracy ;
   char msg[80] ;
   LayerNet *worknet, *worknet2, *bestnet ;

   n_escape = n_retry = 0 ;

/*
   Allocate scratch memory
*/

   MEMTEXT ( "ANX_DD::learn new worknet, bestnet" ) ;
   worknet = new LayerNet ( model , outmod , outlin , nin , nhid1 , nhid2 ,
                            nout , 0 , 0 ) ;
   bestnet = new LayerNet ( model , outmod , outlin , nin , nhid1 , nhid2 ,
                            nout , 0 , 1 ) ;

   if ((worknet == NULL)  ||  (! worknet->ok)
    || (bestnet == NULL)  ||  (! bestnet->ok)) {
      memory_message ( "to learn" ) ;
      if (worknet != NULL)
         delete worknet ;
      if (bestnet != NULL)
         delete bestnet ;
      errtype = 0 ;
      return ;
      }

   if ((lptr->method == METHOD_AN2_CJ)  ||  (lptr->method == METHOD_AN2_LM)) {
      worknet2 = new LayerNet ( model , outmod , outlin , nin , nhid1 , nhid2 ,
                                nout , 0 , 0 ) ;
      if ((worknet2 == NULL)  ||  (! worknet2->ok)) {
         if (worknet2 != NULL)
            delete worknet2 ;
         delete worknet ;
         delete bestnet ;
         memory_message ( "to learn" ) ;
         errtype = 0 ;
         return ;
         }
      }
   else
      worknet2 = NULL ;

/*
   Start by annealing around the starting weights.  These will be zero if the
   net was just created.  If it was restored or partially trained already,
   they will be meaningful.  Anneal1 guarantees that it will not return all
   zero weights if there is at least one hidden layer, even if that means
   that the error exceeds the amount that could be attained by all zeros.
*/

   best_err = best_inner_error = 1.e30 ;
   if ((lptr->method == METHOD_AN1_CJ)  ||  (lptr->method == METHOD_AN1_LM))
      anneal1 ( tptr , lptr , worknet , 1 , 0 ) ;
   else if ((lptr->method == METHOD_AN2_CJ) || (lptr->method == METHOD_AN2_LM))
      anneal2 ( tptr , lptr , worknet , worknet2 , 1 ) ;

/*
   Do direct descent optimization, finding local minimum.
   Then anneal to break out of it.  If successful, loop back up to
   do direct descent again.  Otherwise restart totally random.
*/

   bad_count = 0 ;         // Handles flat local mins
   refined = 0 ;           // Did we ever refine to high resolution?  Not yet.
   new_record = 0 ;        // Refine every time a new inner error record set
   initial_accuracy = pow ( 10.0 , -lptr->cj_acc ) ;
   final_accuracy = initial_accuracy * pow ( 10.0 , -lptr->cj_refine ) ;

   for (itry=1 ; ; itry++) {

      if (neterr < best_err) {   // Keep track of best
         copy_weights ( bestnet , this ) ;
         best_err = neterr ;
         }

      sprintf ( msg , "Try %d  (best=%lf):", itry, best_err ) ;
      normal_message ( msg ) ;

      if (neterr <= lptr->quit_err)
         break ;

      start_of_loop_error = neterr ;
      if ((lptr->method == METHOD_AN1_CJ)  ||  (lptr->method == METHOD_AN2_CJ))
         err = conjgrad ( tptr , 32767 , initial_accuracy ,
                          lptr->quit_err , lptr->cj_progress ) ;
      else if ((lptr->method==METHOD_AN1_LM) || (lptr->method==METHOD_AN2_LM))
         err = lev_marq ( tptr , 32767 , initial_accuracy ,
                          lptr->quit_err , lptr->cj_progress ) ;
      neterr = fabs ( err ) ; // err<0 if user pressed ESCape

      sprintf ( msg , "  Gradient err=%lf", neterr ) ;
      progress_message ( msg ) ;

      if (neterr < best_err) {   // Keep track of best
         copy_weights ( bestnet , this ) ;
         best_err = neterr ;
         }

      if (err <= lptr->quit_err) { // err<0 if user pressed ESCape
         if (err < -1.e29)         // or insufficient memory
            printf ( "\nInsufficient memory for gradient learning." ) ;
         break ;
         }

      seed = flrand() - (long) (itry * 97) ;   // Ensure a new seed for anneal
      sflrand ( seed ) ;

      prev_err = neterr ;  // So we can see if anneal helped

      if ((lptr->method == METHOD_AN1_CJ)  ||  (lptr->method == METHOD_AN1_LM))
         anneal1 ( tptr , lptr , worknet , 0 , itry ) ;
      else if ((lptr->method==METHOD_AN2_CJ) || (lptr->method==METHOD_AN2_LM))
         anneal2 ( tptr , lptr , worknet , worknet2 , 0 ) ;

      sprintf ( msg , "  Anneal err=%lf", neterr ) ;
      progress_message ( msg ) ;

      if (neterr < best_err) {  // Keep track of best
         copy_weights ( bestnet , this ) ;
         best_err = neterr ;
         }

      if (best_err <= lptr->quit_err)
         break ;

      if (neterr < best_inner_error) {  // Keep track of best inner for refine
         best_inner_error = neterr ;
         new_record = 1 ;               // Tells us to refine
         }

      if ((prev_err - neterr) > 1.e-7) { // Did we break out of local min?
         if ((start_of_loop_error - neterr) < 1.e-3)
            ++bad_count ;  // Avoid many unprofitable iters
         else
            bad_count = 0 ;
         if (bad_count < 4) {
            ++n_escape ;          // Count escapes from local min
            continue ;            // Escaped, so gradient learn again
            }
         }

/*
   After the first few tries, and after each improvement thereafter, refine
   to high resolution
*/

      if ((itry-n_escape >= lptr->cj_pretries)  &&  (new_record || ! refined)) {
         if (! refined) {   // If refining the best of the pretries
            copy_weights ( this , bestnet ) ;  // Get that net
            neterr = best_err ;
            }
         refined = 1 ;     // Only force refine once
         new_record = 0 ;  // Reset new inner error record flag
         progress_message ( "  REFINING" ) ;
         if ((lptr->method == METHOD_AN1_CJ) || (lptr->method == METHOD_AN2_CJ))
            err = conjgrad ( tptr , 0 , final_accuracy ,
                             lptr->quit_err , lptr->cj_progress ) ;
         else if ((lptr->method==METHOD_AN1_LM)|| (lptr->method==METHOD_AN2_LM))
            err = lev_marq ( tptr , 0 , final_accuracy ,
                             lptr->quit_err , lptr->cj_progress ) ;
         neterr = fabs ( err ) ; // err<0 if user pressed ESCape
         sprintf ( msg , "  Attained err=%lf", neterr ) ;
         progress_message ( msg ) ;
         if (neterr < best_err) {  // Keep track of best
            copy_weights ( bestnet , this ) ;
            best_err = neterr ;
            }
         }

      if (++n_retry > lptr->retries)
         break ;

      progress_message ( "  RESTART" ) ;
      zero_weights () ;  // Failed to break out, so retry random
      seed = flrand() - (long) (itry * 773) ;   // Insure new seed for anneal
      sflrand ( seed ) ;
      if ((lptr->method == METHOD_AN1_CJ)  ||  (lptr->method == METHOD_AN1_LM))
         anneal1 ( tptr , lptr , worknet , 1 , itry ) ;
      else if ((lptr->method==METHOD_AN2_CJ) || (lptr->method==METHOD_AN2_LM))
         anneal2 ( tptr , lptr , worknet , worknet2 , 1 ) ;
      }

FINISH:
   copy_weights ( this , bestnet ) ;
   neterr = best_err ;
   MEMTEXT ( "AN1_DD::learn delete worknet, bestnet" ) ;
   delete worknet ;
   delete bestnet ;
   sprintf ( msg , "%d successful escapes, %d retries", n_escape, n_retry ) ;
   normal_message ( msg ) ;

   return ;
}
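A minimal, stand-alone sketch of the same control flow follows: anneal for a
starting point, descend to a local minimum, anneal again to try to escape it,
refine the best point found, and restart from a random point when escapes stop
paying off.  It is only an illustration on a toy one-dimensional problem;
toy_anneal and toy_descend are hypothetical stand-ins for anneal1/anneal2 and
conjgrad/lev_marq, and the constants are arbitrary.

#include <math.h>
#include <stdio.h>
#include <stdlib.h>

static double f ( double x )                  // Toy error surface
{
   return (x * x - 2.0) * (x * x - 2.0) ;     // Minima at x = +-sqrt(2)
}

static double toy_anneal ( double x )         // Random perturbation, keep best
{
   double best = x, trial ;
   for (int i=0 ; i<100 ; i++) {
      trial = x + 2.0 * ((double) rand () / RAND_MAX - 0.5) ;
      if (f ( trial ) < f ( best ))
         best = trial ;
      }
   return best ;
}

static double toy_descend ( double x , double tol )  // Crude gradient descent
{
   for (int i=0 ; i<10000 ; i++) {
      double g = 4.0 * x * (x * x - 2.0) ;    // df/dx
      if (fabs ( g ) < tol)
         break ;
      x -= 0.01 * g ;
      }
   return x ;
}

int main ()
{
   double x, best_x = 0.0, best_err = 1.e30 ;

   x = toy_anneal ( 0.0 ) ;                   // Anneal around starting point
   for (int itry=1 ; itry<=5 ; itry++) {
      x = toy_descend ( x , 1.e-4 ) ;         // Find a local minimum
      if (f ( x ) < best_err) {               // Keep track of best
         best_err = f ( x ) ;
         best_x = x ;
         }
      double prev = f ( x ) ;
      x = toy_anneal ( x ) ;                  // Try to break out of it
      if (prev - f ( x ) > 1.e-7)
         continue ;                           // Escaped, so descend again
      x = toy_descend ( best_x , 1.e-6 ) ;    // REFINE the best found
      if (f ( x ) < best_err) {
         best_err = f ( x ) ;
         best_x = x ;
         }
      x = 2.0 * ((double) rand () / RAND_MAX - 0.5) ;  // RESTART random
      }
   printf ( "best x=%lf  err=%lf\n", best_x, best_err ) ;
   return 0 ;
}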
Example #14
void LayerNet::ssg (
   TrainingSet *tptr ,        // Training set to use
   struct LearnParams *lptr , // User's general learning parameters
   int use_grad               // SS if zero, else SSG
   )
{
   int itry, user_quit, n, n_grad ;
   long seed ;
   double best_err, *work1, *work2, *grad, *avg_grad ;
   char msg[80] ;
   LayerNet *worknet1, *worknet2, *bestnet ;
                             
/*
   Allocate network scratch memory
*/

   MEMTEXT ( "SSG::new 2 worknets, bestnet" ) ;
   worknet1 = new LayerNet ( model , outmod , outlin , nin , nhid1 , nhid2 ,
                             nout , 0 , 0 ) ;
   worknet2 = new LayerNet ( model , outmod , outlin , nin , nhid1 , nhid2 ,
                             nout , 0 , 0 ) ;
   bestnet = new LayerNet ( model , outmod , outlin , nin , nhid1 , nhid2 ,
                            nout , 0 , 1 ) ;

   if ((worknet1 == NULL)  ||  (! worknet1->ok)
    || (worknet2 == NULL)  ||  (! worknet2->ok)
    || (bestnet == NULL)  ||  (! bestnet->ok)) {
      memory_message ( "to learn" ) ;
      if (worknet1 != NULL)
         delete worknet1 ;
      if (worknet2 != NULL)
         delete worknet2 ;
      if (bestnet != NULL)
         delete bestnet ;
      errtype = 0 ;
      return ;
      }

/*
   Allocate gradient work memory.
   Work1 is used for hidden layer 2 deltas in REAL model, and output
   activation partial derivatives and deltas in all COMPLEX models.
   Work2 is output deltas in REAL model, error difference in COMPLEX models.
*/

   if (use_grad) {
      if (nhid2)       // Must be REAL model if this is true
         n = nhid2 ;
      else if (model == NETMOD_COMPLEX_INPUT)
         n = nhid1  ?  nout * 2 + nhid1 * 2  :  nout * 2 ;
      else if (model == NETMOD_COMPLEX_HIDDEN)
         n = nout * 4  +  nhid1 * 4 ;
      else if (model == NETMOD_COMPLEX)
         n = nhid1  ?  nout * 6  +  nhid1 * 4  :  nout * 4 ;
      else
         n = 0 ;

      if (n) {
         MEMTEXT ( "SSG::work1" ) ;
         work1 = (double *) MALLOC ( n * sizeof(double) ) ;
         if (work1 == NULL) {
            memory_message ( "to learn" ) ;
            delete worknet1 ;
            delete worknet2 ;
            delete bestnet ;
            errtype = 0 ;
            return ;
            }
         }
      else
         work1 = NULL ;

      if (nhid1 == 0)               // No hidden layer
         n_grad = nout * nin_n ;
      else if (nhid2 == 0)          // One hidden layer
         n_grad = nhid1 * nin_n + nout * nhid1_n ;
      else                          // Two hidden layers
         n_grad = nhid1 * nin_n + nhid2 * nhid1_n + nout * nhid2_n ;

      MEMTEXT ( "SSG::3 work vectors" ) ;
      work2 = (double *) MALLOC ( nout_n * sizeof(double) ) ;
      grad = (double *) MALLOC ( n_grad * sizeof(double) ) ;
      avg_grad = (double *) MALLOC ( n_grad * sizeof(double) ) ;

      if ((work2 == NULL)  ||  (grad == NULL)  ||  (avg_grad == NULL)) {
         if (work1 != NULL)
            FREE ( work1 ) ;
         if (work2 != NULL)
            FREE ( work2 ) ;
         if (grad != NULL)
            FREE ( grad ) ;
         if (avg_grad != NULL)
            FREE ( avg_grad ) ;
         memory_message ( "to learn" ) ;
         delete worknet1 ;
         delete worknet2 ;
         delete bestnet ;
         errtype = 0 ;
         return ;
         }
      }
   else
      work1 = work2 = grad = avg_grad = NULL ;

   best_err = 1.e30 ;
   for (itry=1 ; itry<=lptr->retries+1 ; itry++) {

      user_quit = ssg_core ( tptr , lptr , worknet1 , worknet2 ,
                             work1 , work2 , grad , avg_grad , n_grad ) ;

      if (neterr < best_err) {
         best_err = neterr ;
         copy_weights ( bestnet , this ) ;
         }

      sprintf ( msg , "Try %d  err=%lf  best=%lf", itry, neterr, best_err ) ;
      normal_message ( msg ) ;

      if (user_quit  ||  (neterr < lptr->quit_err))
         break ;

      seed = flrand() - (long) (itry * 97) ;   // Insure new seed for anneal
      sflrand ( seed ) ;
      zero_weights () ;  // Retry random
      }

   copy_weights ( this , bestnet ) ;
   neterr = best_err ;

   MEMTEXT ( "AN1::learn delete 2 worknets, bestnet" ) ;
   delete worknet1 ;
   delete worknet2 ;
   delete bestnet ;

   if (use_grad) {
      if (work1 != NULL) {
         MEMTEXT ( "SSG::work1" ) ;
         FREE ( work1 ) ;
         }
      MEMTEXT ( "SSG::3 work vectors" ) ;
      FREE ( work2 ) ;
      FREE ( grad ) ;
      FREE ( avg_grad) ;
      }

   return ;

}
Example #15
int LayerNet::ssg_core (
   TrainingSet *tptr ,        // Training set to use
   struct LearnParams *lptr , // User's general learning parameters
   LayerNet *avgnet ,         // Work area used to keep average weights
   LayerNet *bestnet ,        // And the best so far
   double *work1 ,            // Gradient work vector
   double *work2 ,            // Ditto
   double *grad ,             // Ditto
   double *avg_grad ,         // Ditto
   int n_grad                 // Length of above vectors
   )
{
   int ntemps, niters, setback, reg, nvars, user_quit ;
   int i, iter, itemp, n_good, n_bad, use_grad ;
   char msg[80] ;
   double tempmult, temp, fval, bestfval, starttemp, stoptemp, fquit ;
   double avg_func, new_fac, gradlen, grad_weight, weight_used ;
   enum RandomDensity density ;
   SingularValueDecomp *sptr ;
   struct AnnealParams *aptr ; // User's annealing parameters

   aptr = lptr->ap ;

   ntemps = aptr->temps0 ;
   niters = aptr->iters0 ;
   setback = aptr->setback0 ;
   starttemp = aptr->start0 ;
   stoptemp = aptr->stop0 ;
   if (aptr->random0 == ANNEAL_GAUSSIAN)
      density = NormalDensity ;
   else if (aptr->random0 == ANNEAL_CAUCHY)
      density = CauchyDensity ;
   else
      density = NormalDensity ;  // Defensive default so density is never used uninitialized

   if (! (ntemps * niters))
      return 0 ;

/*
   Initialize other local parameters.  Note that there is no sense using
   regression if there are no hidden layers.
*/

   use_grad = (grad != NULL) ;
   fquit = lptr->quit_err ;
   reg = nhid1 ;

/*
   Allocate the singular value decomposition object for REGRESS.
   Also allocate a work area for REGRESS to preserve matrix.
*/

   if (reg) {                 // False if no hidden layers
      if (nhid2 == 0)         // One hidden layer
         nvars = nhid1_n ;
      else                    // Two hidden layers
         nvars = nhid2_n ;

      i = (model == NETMOD_COMPLEX)  ?  2 * tptr->ntrain : tptr->ntrain ;

      if (i < nvars) {
         warning_message ( "Too few training sets for regression." ) ;
         reg = 0 ;
         }
      else {
         MEMTEXT ( "SSG: new SingularValueDecomp" ) ;
         sptr = new SingularValueDecomp ( i , nvars , 1 ) ;

         if ((sptr == NULL)  || ! sptr->ok) {
            memory_message (
               "for SS(G) with regression.  Using total randomization.");
            if (sptr != NULL)
               delete sptr ;
            reg = 0 ;
            }
         }
      }

/*
   For the basic algorithm, we will keep the current 'average' network
   weight set in avgnet.  This will be the moving center about which the
   perturbation is done.
   Although not directly related to the algorithm itself, we keep track
   of the best network ever found in bestnet.  That is what the user
   will get at the end.
*/

   copy_weights ( bestnet , this ) ; // Current weights are best so far
   copy_weights ( avgnet , this ) ;  // Center of perturbation
   bestfval = trial_error ( tptr ) ;

/*
   If this is being used to initialize the weights, make sure that they are
   not identically zero.  Do this by setting bestfval huge so that
   SOMETHING is accepted later.
*/

   if (nhid1) {
      i = nhid1 * nin_n ;
      while (i--) {
         if (fabs(hid1_coefs[i]) > 1.e-10)
            break ;
         }
      if (i < 0)
         bestfval = 1.e30 ;
      }

/*
   Initialize by cumulating a bunch of points
*/

   normal_message ( "Initializing..." ) ;
   avg_func = 0.0 ;                       // Mean function around center
   if (use_grad) {
      for (i=0 ; i<n_grad ; i++)          // Zero the mean gradient
         avg_grad[i] = 0.0 ;
      }

   for (iter=0 ; iter<niters ; iter++) {  // Initializing iterations

      perturb ( avgnet , this , starttemp , reg , density ) ; // Move point

      if (reg)                            // If using regression, estimate
         fval = regress ( tptr , sptr ) ; // out weights now, ignore fval
      if (use_grad)                       // Also need gradient?
         fval = gradient ( tptr , work1 , work2 , grad ) ; // fval redundant
      else if (! reg)                     // If reg we got fval from regress
         fval = trial_error ( tptr ) ;

      avg_func += fval ;                  // Cumulate mean function

      if (use_grad) {                     // Also need gradient?
         for (i=0 ; i<n_grad ; i++)       // Cumulate mean gradient
            avg_grad[i] += grad[i] ;
         }

      if (fval < bestfval) {              // If this iteration improved
         bestfval = fval ;                // then update the best so far
         copy_weights ( bestnet , this ) ; // Keep the network
         if (bestfval <= fquit)           // If we reached the user's
            goto FINISH ;                 // limit, we can quit
         }

      if ((user_quit = user_pressed_escape ()) != 0)
         goto FINISH ;

      } // Loop: for all initial iters

   avg_func /= niters ;          // Mean of all points around avgnet
   new_fac = 1.0 / niters ;      // Weight of each point

   sprintf ( msg , "  avg=%.6lf  best=%.6lf", avg_func, bestfval ) ;
   progress_message ( msg ) ;

   if (use_grad) {               // Also need gradient?
      gradlen = 0.0 ;            // Will cumulate grad length
      for (i=0 ; i<n_grad ; i++) {  // Find gradient mean and length
         avg_grad[i] /= niters ;
         gradlen += avg_grad[i] * avg_grad[i] ;
         }
      gradlen = sqrt ( gradlen ) ;
      grad_weight = 0.5 ;
      }

/*
   This is the temperature reduction loop and the iteration within
   temperature loop.
*/

   temp = starttemp ;
   tempmult = exp( log( stoptemp / starttemp ) / (ntemps-1)) ;
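   // The schedule is geometric: each pass multiplies the temperature by a
   // constant factor.  For example, with the defaults set in the main program
   // below (temps0 = 3, start0 = 3.0, stop0 = 1.0), tempmult = (1/3)^(1/2),
   // about 0.577, giving temperatures 3.0, 1.73, and 1.0.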
   user_quit = 0 ;                           // Flags user pressed ESCape

   for (itemp=0 ; itemp<ntemps ; itemp++) {  // Temp reduction loop

      n_good = n_bad = 0 ;                   // Counts better and worse

      sprintf ( msg , "Temp=%.3lf ", temp ) ;
      normal_message ( msg ) ;

      for (iter=0 ; iter<niters ; iter++) {  // Iters per temp loop

         if ((n_bad >= 10)  &&
             ((double) n_good / (double) (n_good+n_bad)  <  0.15))
            break ;

         perturb ( avgnet , this , temp ,
                   reg , density ) ;         // Randomly perturb about center

         if (use_grad)                       // Bias per gradient?
            weight_used = shift ( grad , this , grad_weight , reg ) ;

         if (reg) {                          // If using regression, estimate
            fval = regress ( tptr , sptr ) ; // out weights now
            if ((user_quit = user_pressed_escape ()) != 0)
               break ;
            if (fval >= avg_func) {          // If this would raise mean
               ++n_bad ;                     // Count this bad point for user
               continue ;                    // Skip it and try again
               }
            }

         if (use_grad)                       // Need gradient, fval redundant
            fval = gradient ( tptr , work1 , work2 , grad ) ;
         else if (! reg)                     // If reg we got fval from regress
            fval = trial_error ( tptr ) ;

         if ((user_quit = user_pressed_escape ()) != 0)
            break ;

         if (fval >= avg_func) {             // If this would raise mean
            ++n_bad ;                        // Count this bad point for user
            continue ;                       // Skip it and try again
            }

         ++n_good ;

         if (fval < bestfval) {              // If this iteration improved
            bestfval = fval ;                // then update the best so far
            copy_weights ( bestnet , this ) ; // Keep the network

            if (bestfval <= fquit)           // If we reached the user's
               break ;                       // limit, we can quit

            iter -= setback ;                // It often pays to keep going
            if (iter < 0)                    // at this temperature if we
               iter = 0 ;                    // are still improving
            }

         adjust ( avgnet , this , reg , new_fac ) ; // Move center slightly
         avg_func = new_fac * fval  +  (1.0 - new_fac) * avg_func ;
         if (use_grad) {
            grad_weight = new_fac * weight_used + (1.0 - new_fac) * grad_weight ;
            for (i=0 ; i<n_grad ; i++)          // Adjust mean gradient
               avg_grad[i] = new_fac * grad[i] + (1.0 - new_fac) * avg_grad[i] ;
            }
         }                                   // Loop: for all iters at a temp

/*
   Iters within temp loop now complete
*/

      sprintf ( msg , " %.3lf%% improved  avg=%.5lf  best=%.5lf",
         100.0 * n_good / (double) (n_good+n_bad), avg_func, bestfval ) ;
      progress_message ( msg ) ;

      if (use_grad) {
         gradlen = 0.0 ;                        // Will cumulate grad length
         for (i=0 ; i<n_grad ; i++)             // Find gradient length
            gradlen += avg_grad[i] * avg_grad[i] ;
         gradlen = sqrt ( gradlen ) ;
         sprintf ( msg , "  grad=%.5lf", gradlen ) ;
         progress_message ( msg ) ;
         }

      if (bestfval <= fquit)  // If we reached the user's
         break ;              // limit, we can quit

      if (user_quit)
         break ;

      temp *= tempmult ;      // Reduce temp for next pass
      }                       // through this temperature loop


/*
   The trials left this weight set and neterr in random condition.
   Make them equal to the best, which will be the original
   if we never improved.
*/

FINISH:
   copy_weights ( this , bestnet ) ; // Return best weights in this net
   neterr = bestfval ;               // Trials destroyed weights, err

   if (reg) {
      MEMTEXT ( "SSG: delete SingularValueDecomp" ) ;
      delete sptr ;
      }

   if (user_quit)
      return 1 ;
   else
      return 0 ;
}
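Stripped of the network and regression details, the loop above is stochastic
smoothing: perturb randomly about a slowly moving center, reject trial points
whose error exceeds the running mean, and blend each accepted point into both
the center and the running mean with a small factor while the temperature
shrinks geometrically.  The stand-alone sketch below illustrates only that
skeleton on a hypothetical two-parameter error surface toy_f; it is not part
of the library, and the constants are arbitrary.

#include <math.h>
#include <stdio.h>
#include <stdlib.h>

static double toy_f ( double *p )             // Hypothetical error surface
{
   return (p[0] - 1.0) * (p[0] - 1.0)  +  (p[1] + 2.0) * (p[1] + 2.0) ;
}

int main ()
{
   double center[2] = { 0.0, 0.0 } ;          // Moving center (avgnet's role)
   double best[2] = { 0.0, 0.0 } ;            // Best point ever (bestnet's role)
   double trial[2], fval, avg_func, best_err = 1.e30 ;
   int ntemps = 4, niters = 50 ;
   double temp = 3.0, stoptemp = 0.02 ;
   double tempmult = exp ( log ( stoptemp / temp ) / (ntemps - 1) ) ;
   double new_fac = 1.0 / niters ;            // Weight of each accepted point

   avg_func = toy_f ( center ) ;              // Crude initial mean; the real
                                              // routine averages a whole batch
   for (int itemp=0 ; itemp<ntemps ; itemp++) {
      for (int iter=0 ; iter<niters ; iter++) {
         for (int i=0 ; i<2 ; i++)            // Perturb about the center
            trial[i] = center[i] + temp * ((double) rand () / RAND_MAX - 0.5) ;
         fval = toy_f ( trial ) ;
         if (fval >= avg_func)                // Reject points worse than mean
            continue ;
         if (fval < best_err) {               // Keep track of best
            best_err = fval ;
            best[0] = trial[0] ;
            best[1] = trial[1] ;
            }
         for (int i=0 ; i<2 ; i++)            // Move center slightly
            center[i] = new_fac * trial[i]  +  (1.0 - new_fac) * center[i] ;
         avg_func = new_fac * fval  +  (1.0 - new_fac) * avg_func ;
         }
      temp *= tempmult ;                      // Reduce temp for next pass
      }

   printf ( "best=(%lf, %lf)  err=%lf\n", best[0], best[1], best_err ) ;
   return 0 ;
}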
Example #16
int main (
   int argc ,    // Number of command line arguments (includes prog name)
   char *argv[]  // Arguments (prog name is argv[0])
   )

{

/*
   Declarations of local variables
*/

/*
   User's command control line related variables are here.
   Control_file_number and control_files permit nesting of 'CONTROL' commands.
   If control_file_number equals -1, control commands are read from stdin.
   Otherwise they are read from that file in FILE *control_files.
   Up to MAX_CONTROL_FILES can be stacked.
*/

   int control_file_number = -1 ;           // Stack pointer for control files
   FILE *control_files[MAX_CONTROL_FILES] ; // This is the stack

   char *control_line ;    // User's commands here
   char *command, *rest ;  // Pointers to its command and parameter parts
   int n_command, n_rest ; // Lengths of those parts

/*
   These are network parameters which may be set by the user via commands.
   They are initialized to defaults which indicate that the user has not
   yet set them.  As they are set, their current values are placed here.
   When learning is done for a network, their values are copied from here
   into the network object.  When a network is read, the object's values
   are copied from it to here.  Otherwise, these variables are not used;
   the values in the network object itself are used.  The only purpose of
   these variables is to keep track of current values.
*/

   int net_model = -1 ;     // Network model (see NETMOD_? in CONST.H)
   int out_model = -1 ;     // Output model (see OUTMOD_? in CONST.H)
   int n_inputs = -1 ;      // Number of input neurons
   int n_outputs = -1 ;     // Number of output neurons
   int n_hidden1 = -1 ;     // Number of hidden layer one neurons
   int n_hidden2 = -1 ;     // Ditto layer 2 (0 if just one hidden layer)


   TrainingSet *tset = NULL ;            // Training set here
   Network *network = NULL ;             // Network here
   struct LearnParams learn_params ;     // General learning parameters
   struct AnnealParams anneal_params ;   // Simulated annealing parameters
   struct GenInitParams geninit_params ; // Genetic initialization parameters
   struct KohParams koh_params ;         // Kohonen parameters

   int classif_output = -1 ;  // Current class (0=reject) for classif training
   char out_file[80] = "" ;   // File for EXECUTE output
   double threshold = 0.0 ;   // CLASSIFY confusion reject cutoff (0 until CONFUSION THRESHOLD is set)

/*
   Miscellaneous variables
*/

   int i, n, m ;
   double p ;
   char *msg ;
   FILE *fp ;

/*
--------------------------------------------------------------------------------

   Program starts here.

   Verify that a careless user didn't fail to set the integer size
   correctly when compiling.

--------------------------------------------------------------------------------
*/

#if VERSION_16_BIT
   if (sizeof(int) > 2) {
      printf ( "\nRecompile with VERSION_16_BIT set to 0 in CONST.H" ) ;
      exit ( 1 ) ;
      }
#else
   if (sizeof(int) < 4) {
      printf ( "\nRecompile with VERSION_16_BIT set to 1 in CONST.H" ) ;
      exit ( 1 ) ;
      }
#endif

printf ( "\nNEURAL - Program to train and test neural networks" ) ;
printf("\nCopyright (c) 1993 by Academic Press, Inc.");
printf("\nAll rights reserved.  Permission is hereby granted, until further notice,");
printf("\nto make copies of this diskette, which are not for resale, provided these");
printf("\ncopies are made from this master diskette only, and provided that the");
printf("\nfollowing copyright notice appears on the diskette label:");
printf("\n(c) 1993 by Academic Press, Inc.");
printf("\nExcept as previously stated, no part of the computer program embodied in");
printf("\nthis diskette may be reproduced or transmitted in any form or by any means,");
printf("\nelectronic or mechanical, including input into storage in any information");
printf("\nsystem for resale, without permission in writing from the publisher.");
printf("\nProduced in the United States of America.");
printf("\nISBN 0-12-479041-0");

/*
   Process command line parameters
*/

   mem_name[0] = 0 ;  // Default is no memory allocation file

   for (i=1 ; i<argc ; i++) {  // Process all command line args
      str_to_upr ( argv[i] ) ; // Easier if all upper case

      if (! strcmp ( argv[i] , "/DEBUG" )) {
         sscanf ( argv[++i] , "%s" , mem_name ) ;
         if ((strlen ( mem_name ) > 1)  ||  ! isalpha ( mem_name[0] )) {
            printf ( "\nIllegal DEBUG drive (%s); must be 1 letter." ) ;
            exit ( 1 ) ;
            }
         continue ;
         }

      printf ( "\nIllegal command line parameter (%s)", argv[i] ) ;
      exit ( 1 ) ;
      }

/*
   Initialize memory allocation debugging
*/

   if (strlen ( mem_name )) {
      strcat ( mem_name , ":mem.log" ) ;
      fp = fopen ( mem_name , "wt" ) ;
      if (fp == NULL) {
         printf ( "\nCannot open debugging file %s", mem_name ) ;
         exit ( 1 ) ;
         }
      fclose ( fp ) ;
      mem_log = 1 ;
      }
   else 
      mem_log = 0 ;

   mem_used = 0 ;

/*
   Initialize defaults
*/

   learn_params.init = -1 ;
   learn_params.quit_err = 0.0 ;
   learn_params.retries = 32767 ;

   anneal_params.temps0 = 3 ;
   anneal_params.temps = 4 ;
   anneal_params.iters0 = 50 ;
   anneal_params.iters = 20 ;
   anneal_params.setback0 = 50 ;
   anneal_params.setback = 20 ;
   anneal_params.start0 = 3.0 ;
   anneal_params.start = 4.0 ;
   anneal_params.stop0 = 1.0 ;
   anneal_params.stop = 0.02 ;

   geninit_params.pool = 50 ;
   geninit_params.gens = 3 ;
   geninit_params.climb = 0 ;
   geninit_params.overinit = 1.5 ;
   geninit_params.pcross = 0.8 ;
   geninit_params.pmutate = 0.0001 ;

   koh_params.normalization = 0 ;  // 0=multiplicative, 1=Z 
   koh_params.learn_method = 1 ;   // 0=additive, 1=subtractive
   koh_params.rate = 0.4 ;         // learning rate
   koh_params.reduction = 0.99 ;   // learning rate reduction

   learn_params.ap = &anneal_params ;
   learn_params.gp = &geninit_params ;
   learn_params.kp = &koh_params ;

   act_func_init () ; // Initialize interpolation table for activation function

   MEMTEXT ( "NEURAL: control_line, msg" ) ;
   if (((control_line = (char *) MALLOC ( CONTROL_LINE_LENGTH+1 )) == NULL)
    || ((msg = (char *) MALLOC ( CONTROL_LINE_LENGTH+1 )) == NULL)) {
      printf ( "\nInsufficient memory" ) ;
      exit ( 1 ) ;
      }

/*
   Main loop processes all commands
*/

   for (;;) {

      get_control_line ( control_line , &control_file_number, control_files ) ;

      split_control_line ( control_line , &command , &n_command ,
                           &rest , &n_rest ) ;

      if (! n_command) {
         if (n_rest) {
            sprintf ( msg , "No colon after command: %s", rest ) ;
            error_message ( msg ) ;
            }
         continue ;
         }

      sprintf ( msg , "%s : %s", command, rest ) ;
      normal_message ( msg ) ;

/*
   Act on the command
*/

      if (! strcmp ( command , "QUIT" ))
         break ;

      if (! strcmp ( command , "CONTROL" )) {
         stack_control_file ( rest , &control_file_number , control_files ) ;
         continue ;
         }

      if (! strcmp ( command , "NETWORK MODEL" )) {
         if (! strcmp ( rest , "LAYER" ))
            n = NETMOD_LAYER ;
         else if (! strcmp ( rest , "KOHONEN" ))
            n = NETMOD_KOH ;
         else {
            sprintf ( msg , "Illegal NETWORK MODEL: %s", rest ) ;
            error_message ( msg ) ;
            continue ;
            }
         if (net_model == n)
            continue ;
         if (ok_to_clear_weights( &network )) {
            net_model = n ;
            learn_params.init = -1 ;
            }
         else
            warning_message ( "Command aborted" ) ;
         continue ;
         }

      if (! strcmp ( command , "OUTPUT MODEL" )) {
         if (! strcmp ( rest , "CLASSIFY" ))
            n = OUTMOD_CLASSIFY ;
         else if (! strcmp ( rest , "AUTO" ))
            n = OUTMOD_AUTO ;
         else if (! strcmp ( rest , "GENERAL" ))
            n = OUTMOD_GENERAL ;
         else {
            sprintf ( msg , "Illegal OUTPUT MODEL: %s", rest ) ;
            error_message ( msg ) ;
            continue ;
            }
         if (out_model == n)
            continue ;
         if ((ok_to_clear_tset( &tset )) && (ok_to_clear_weights( &network)))
            out_model = n ;
         else
            warning_message ( "Command aborted" ) ;
         continue ;
         }

      if (! strcmp ( command , "N INPUTS" )) {
         m = sscanf ( rest , "%d" , &n ) ;
         if ((m <= 0)  ||  (n <= 0)  ||  (n > MAX_INPUTS)) {
            sprintf ( msg , "Illegal N INPUTS: %s", rest ) ;
            error_message ( msg ) ;
            continue ;
            }
         if (n_inputs == n)
            continue ;
         if ((ok_to_clear_tset( &tset)) && (ok_to_clear_weights(&network)))
            n_inputs = n ;
         else
            warning_message ( "Command aborted" ) ;
         continue ;
         }

      if (! strcmp ( command , "N OUTPUTS" )) {
         m = sscanf ( rest , "%d" , &n ) ;
         if ((m <= 0)  ||  (n <= 0)  ||  (n > MAX_OUTPUTS)) {
            sprintf ( msg , "Illegal N OUTPUTS: %s", rest ) ;
            error_message ( msg ) ;
            continue ;
            }
         if (n_outputs == n)
            continue ;
         if ((ok_to_clear_tset( &tset)) && (ok_to_clear_weights(&network)))
            n_outputs = n ;
         else
            warning_message ( "Command aborted" ) ;
         continue ;
         }

      if (! strcmp ( command , "N HIDDEN1" )) {
         m = sscanf ( rest , "%d" , &n ) ;
         if ((m <= 0)  ||  (n < 0)  ||  (n > MAX_HIDDEN)) {
            sprintf ( msg , "Illegal N HIDDEN1: %s", rest ) ;
            error_message ( msg ) ;
            continue ;
            }
         if (n_hidden1 == n)
            continue ;
         if (ok_to_clear_weights( &network ))
            n_hidden1 = n ;
         else
            warning_message ( "Command aborted" ) ;
         continue ;
         }

      if (! strcmp ( command , "N HIDDEN2" )) {
         m = sscanf ( rest , "%d" , &n ) ;
         if ((m <= 0)  ||  (n < 0)  ||  (n > MAX_HIDDEN)) {
            sprintf ( msg , "Illegal N HIDDEN2: %s", rest ) ;
            error_message ( msg ) ;
            continue ;
            }
         if (n  &&  ! n_hidden1) {
            error_message ( "N HIDDEN2 must be 0 if N HIDDEN1 IS 0." ) ;
            continue ;
            }
         if (n_hidden2 == n)
            continue ;
         if (ok_to_clear_weights( &network ))
            n_hidden2 = n ;
         else
            warning_message ( "Command aborted" ) ;
         continue ;
         }

      if (! strcmp ( command , "TRAIN" )) {
         if ((out_model == OUTMOD_AUTO)  &&  (n_outputs != n_inputs)) {
            warning_message ( "Setting N OUTPUTS = N INPUTS" ) ;
            n_outputs = n_inputs ;
            }
         if (out_model <= 0)
            error_message ( "TRAIN used before OUTPUT MODEL set." ) ;
         else if (n_inputs <= 0)
            error_message ( "TRAIN used before N INPUTS set." ) ;
         else if (n_outputs <= 0)
            error_message ( "TRAIN used before N OUTPUTS set." ) ;
         else if ((net_model != NETMOD_KOH) && (out_model == OUTMOD_CLASSIFY)
                  &&  (classif_output < 0))
            error_message( "CLASSIFY output mode but CLASSIFY OUTPUT not set.");
         else if ((net_model == NETMOD_KOH)  &&  (out_model != OUTMOD_CLASSIFY))
            error_message( "KOHONEN network requires CLASSIFY output mode.");
         else {
            if (tset == NULL) {
               MEMTEXT ( "NEURAL: new tset" ) ;
               tset = new TrainingSet ( out_model , n_inputs , n_outputs ) ;
               }
            tset->train ( rest , classif_output ) ;
            }
         continue ;
         }

      if (check_anneal ( command , rest , &anneal_params ))
         continue ;

      if (check_genetic ( command , rest , &geninit_params ))
         continue ;

      if (check_kohonen ( command , rest , &koh_params , &network ))
         continue ;

      if (check_learn_params ( command , rest , &learn_params , net_model ))
         continue ;

      if (! strcmp ( command , "LEARN" )) {
         if ((tset == NULL)  ||  (tset->ntrain == 0)) {
            error_message ( "Cannot LEARN; No training set exists." ) ;
            continue ;
            }
         if ((net_model == NETMOD_KOH)  &&  (out_model != OUTMOD_CLASSIFY)) {
            error_message( "KOHONEN network requires CLASSIFY output mode.");
            continue ;
            }
         if (learn_params.init < 0) {
            error_message( "Initialization method not set.");
            continue ;
            }
         if (network == NULL) {
            if (net_model == NETMOD_LAYER) {
               if (n_hidden1 < 0) {
                  error_message ( "LEARN used before N HIDDEN1 set." ) ;
                  continue ;
                  }
               else if (n_hidden2 < 0) {
                  error_message ( "LEARN used before N HIDDEN2 set." ) ;
                  continue ;
                  }
               else {
                  MEMTEXT ( "NEURAL: new LayerNet" ) ;
                  network = new LayerNet ( out_model , n_inputs , n_hidden1 ,
                                           n_hidden2 , n_outputs , 1 , 1 ) ;
                  }
               }
            else if (net_model == NETMOD_KOH) {
               MEMTEXT ( "NEURAL: new KohNet" ) ;
               network = new KohNet ( n_inputs , n_outputs ,
                                      &koh_params , 1 , 1 ) ;
               }
            }
         if ((network == NULL)  ||  (! network->ok)) {  // Malloc failure?
            memory_message ( "to create network." ) ;
            if (network != NULL) {
               delete network ;
               network = NULL ;
               }
            continue ;
            }
         network->learn ( tset , &learn_params ) ;
         if (network->neterr > 0.999999) {  // Indicates massive failure
            MEMTEXT ( "NEURAL: learn failure delete network" ) ;
            delete network ;
            network = NULL ;
            }
         else {
            sprintf ( msg , "Final error = %.4lf%% of max possible",
                      100.0 * network->neterr ) ;
            normal_message ( msg ) ;
            }
         continue ;
         }

      if (! strcmp ( command , "SAVE WEIGHTS" )) {
         if (network == NULL)
            error_message ( "There are no learned weights to save." ) ;
         else
            wt_save ( network , net_model , 0 , rest ) ;
         continue ;
         }

      if (! strcmp ( command , "RESTORE WEIGHTS" )) {
         if (network != NULL) {
            MEMTEXT ( "NEURAL: delete network for restore" ) ;
            delete network ;
            network = NULL ;
            }
         network = wt_restore ( rest , &net_model ) ;
         if (network == NULL)
            continue ;
         if (tset != NULL) {
            if ((tset->nin != network->nin)
             || (tset->nout != network->nout)
             || (tset->outmod != network->outmod)) {
               error_message ( "Network conflicts with existing training set.");
               continue ;
               }
            }
         out_model = network->outmod ;
         n_inputs = network->nin ;
         n_outputs = network->nout ;
         if (net_model == NETMOD_LAYER) {
            n_hidden1 = ((LayerNet*) network)->nhid1 ;
            n_hidden2 = ((LayerNet*) network)->nhid2 ;
            }
         if (net_model == NETMOD_KOH)
            koh_params.normalization = ((KohNet *) network)->normalization ;
         learn_params.init = -1 ;
         continue ;
         }

      if (! strcmp ( command , "CLEAR TRAINING" )) {
         if (tset != NULL) {
            MEMTEXT ( "NEURAL: delete tset" ) ;
            delete tset ;
            tset = NULL ;
            }
         continue ;
         }

      if (! strcmp ( command , "CLEAR WEIGHTS" )) {
         if (network != NULL) {
            MEMTEXT ( "NEURAL: delete network" ) ;
            delete network ;
            network = NULL ;
            }
         continue ;
         }

      if (! strcmp ( command , "CLASSIFY OUTPUT" )) {
         if (net_model == NETMOD_KOH) {
            error_message ( "Cannot specify output for KOHONEN model." ) ;
            continue ;
            }
         if (n_outputs < 0) {
            error_message ( "CLASSIFY OUTPUT used before N OUTPUTS set." ) ;
            continue ;
            }
         if (out_model != OUTMOD_CLASSIFY) {
            error_message
                  ( "CLASSIFY OUTPUT only valid when OUTPUT MODEL:CLASSIFY" ) ;
            continue ;
            }
         m = sscanf ( rest , "%d" , &n ) ;
         if ((m <= 0)  ||  (n < 0)) {
            sprintf ( msg , "Illegal CLASSIFY OUTPUT: %s", rest ) ;
            error_message ( msg ) ;
            }
         else if (n > n_outputs) {
            sprintf ( msg , "CLASSIFY OUTPUT (%d) exceeds N OUTPUTS (%d)",
                      n, n_outputs ) ;
            error_message ( msg ) ;
            }
         else
            classif_output = n ;
         continue ;
         }


      if (! strcmp ( command , "OUTPUT FILE" )) {
         strcpy ( out_file , rest ) ;
         continue ;
         }

      if (! strcmp ( command , "EXECUTE" )) {
         if (network == NULL)
            error_message ( "There is no trained network" ) ;
         else
            network->execute_from_file ( rest , out_file ) ;
         continue ;
         }

      if (! strcmp ( command , "CLASSIFY" )) {
         if (network == NULL)
            error_message ( "There is no trained network" ) ;
         else if (out_model != OUTMOD_CLASSIFY)
            error_message ( "CLASSIFY valid only in CLASSIFY output mode" ) ;
         else
            network->classify_from_file ( rest , threshold ) ;
         continue ;
         }

      if (! strcmp ( command , "RESET CONFUSION" )) {
         if (network == NULL)
            error_message ( "There is no trained network" ) ;
         else
            network->reset_confusion () ;
         continue ;
         }

      if (! strcmp ( command , "CONFUSION THRESHOLD" )) {
         p = atof ( rest ) ;
         if ((p < 0.0)  ||  (p > 100.0)) {
            sprintf ( msg , "Illegal CONFUSION THRESHOLD: %s", rest ) ;
            error_message ( msg ) ;
            }
         else
            threshold = p / 100.0 ;
         continue ;
         }

      if (! strcmp ( command , "SHOW CONFUSION" )) {
         if (network == NULL)
            error_message ( "There is no trained network" ) ;
         else if (out_model != OUTMOD_CLASSIFY)
            error_message ( "CONFUSION valid only in CLASSIFY output mode" ) ;
         else
            network->show_confusion () ;
         continue ;
         }

      if (! strcmp ( command , "SAVE CONFUSION" )) {
         if (network == NULL)
            error_message ( "There is no trained network" ) ;
         else if (out_model != OUTMOD_CLASSIFY)
            error_message ( "CONFUSION valid only in CLASSIFY output mode" ) ;
         else
            network->save_confusion ( rest ) ;
         continue ;
         }

      sprintf ( msg , "Unknown command: %s", command ) ;
      error_message ( msg ) ;

      } // Endless command loop

   MEMTEXT ( "NEURAL: control_line, msg" ) ;
   FREE ( control_line ) ;
   FREE ( msg ) ;
   MEMCLOSE () ;
   return 0 ;
}
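For reference, commands fed to this driver on stdin or via CONTROL files use
the "COMMAND : parameter" form parsed above.  The fragment below is a
hypothetical session for a small layered classifier: the file names are
invented, the exact spacing around the colon is assumed, and the
learning-parameter commands dispatched through check_learn_params (including
the required initialization method, which must be chosen before LEARN) are not
shown in this listing, so they are omitted here.

NETWORK MODEL : LAYER
OUTPUT MODEL : CLASSIFY
N INPUTS : 10
N OUTPUTS : 2
N HIDDEN1 : 4
N HIDDEN2 : 0
CLASSIFY OUTPUT : 1
TRAIN : class1.dat
CLASSIFY OUTPUT : 2
TRAIN : class2.dat
LEARN :
SAVE WEIGHTS : mynet.wts
QUIT :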