Example #1
void LayerNet::an1 ( TrainingSet *tptr , struct LearnParams *lptr )
{
   int itry, user_quit ;
   long seed ;
   double best_err ;
   char msg[80] ;
   LayerNet *worknet, *bestnet ;

/*
   Allocate scratch memory
*/

   MEMTEXT ( "AN1::learn new worknet, bestnet" ) ;
   worknet = new LayerNet ( model , outmod , outlin , nin , nhid1 , nhid2 ,
                            nout , 0 , 0 ) ;
   bestnet = new LayerNet ( model , outmod , outlin , nin , nhid1 , nhid2 ,
                            nout , 0 , 1 ) ;

   if ((worknet == NULL)  ||  (! worknet->ok)
    || (bestnet == NULL)  ||  (! bestnet->ok)) {
      memory_message ( "to learn" ) ;
      if (worknet != NULL)
         delete worknet ;
      if (bestnet != NULL)
         delete bestnet ;
      errtype = 0 ;
      return ;
      }

   best_err = 1.e30 ;
   for (itry=1 ; itry<=lptr->retries+1 ; itry++) {

      user_quit = anneal1 ( tptr , lptr , worknet , 1 , itry ) ;
      if (neterr < best_err) {
         best_err = neterr ;
         copy_weights ( bestnet , this ) ;
         }

      sprintf ( msg , "Try %d  err=%lf  best=%lf", itry, neterr, best_err ) ;
      normal_message ( msg ) ;

      if (user_quit  ||  (neterr < lptr->quit_err))
         break ;

      seed = flrand() - (long) (itry * 97) ;   // Ensure new seed for anneal
      sflrand ( seed ) ;
      zero_weights () ;  // Retry random
      }

   copy_weights ( this , bestnet ) ;
   neterr = best_err ;
   MEMTEXT ( "AN1::learn delete worknet, bestnet" ) ;
   delete worknet ;
   delete bestnet ;
   return ;
}
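
The loop above is a generic restart-and-keep-best pattern: run one annealing pass, remember the lowest error seen, then reseed the generator and zero the weights before the next try.  A minimal self-contained sketch of that pattern on a toy one-dimensional objective (the toy objective and all names below are illustrative, not part of the NEURAL sources):

#include <cstdio>
#include <cstdlib>

// Toy stand-in for trial_error(): one parameter, minimum at x = 2
static double toy_error ( double x )
{
   return (x - 2.0) * (x - 2.0) ;
}

int main ()
{
   double best_err = 1.e30, best_x = 0.0 ;

   for (int itry=1 ; itry<=5 ; itry++) {              // Retry loop, as in an1
      double x = 4.0 * rand() / (double) RAND_MAX ;   // Random restart
      double err = toy_error ( x ) ;                  // One training attempt
      if (err < best_err) {                           // Keep track of the best
         best_err = err ;
         best_x = x ;
         }
      printf ( "Try %d  err=%lf  best=%lf\n", itry, err, best_err ) ;
      if (best_err < 1.e-4)                           // Analogue of quit_err
         break ;
      }

   printf ( "Best x=%lf  err=%lf\n", best_x, best_err ) ;
   return 0 ;
}
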
Example #2
void Network::show_confusion ()
{
   int i ;
   char *msg ;

   MEMTEXT ( "CONFUSE:show msg" ) ;
   if ((msg = (char *) MALLOC ( (nout+1) * 5 + 11 )) == NULL ) {
      memory_message ( "to SHOW CONFUSION" ) ;
      return ;
      }

   strcpy ( msg , "Confusion:" ) ;                       // 10-character label
   for (i=0 ; i<nout ; i++)                              // One 5-wide field per output
      sprintf ( msg+5*i+10 , "%5d" , confusion[i] ) ;
   sprintf ( msg+5*nout+10, "%5d", confusion[nout] ) ;   // Plus the extra final category
   msg[5*nout+15] = 0 ;                                  // Terminate the string
   normal_message ( msg ) ;
   FREE ( msg ) ;
   return ;
}
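
The buffer arithmetic above packs a 10-character label followed by nout+1 right-justified 5-character fields.  A small stand-alone sketch of the same layout with made-up counts (the values, and folding the last field into the loop, are illustrative only):

#include <cstdio>
#include <cstdlib>
#include <cstring>

int main ()
{
   // Hypothetical counts: one per class plus the extra final category (confusion[nout])
   int nout = 3 ;
   int confusion[4] = { 12, 3, 0, 1 } ;

   // Same layout as show_confusion: 10-character label, then nout+1 fields of width 5
   char *msg = (char *) malloc ( (nout+1) * 5 + 11 ) ;
   if (msg == NULL)
      return 1 ;

   strcpy ( msg , "Confusion:" ) ;
   for (int i=0 ; i<=nout ; i++)      // Last field handled inside the loop here
      sprintf ( msg + 5*i + 10 , "%5d" , confusion[i] ) ;
   msg[5*nout+15] = 0 ;               // Terminate after the last field

   puts ( msg ) ;                     // Prints "Confusion:   12    3    0    1"
   free ( msg ) ;
   return 0 ;
}
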
Example #3
void LayerNet::anx_dd ( TrainingSet *tptr , struct LearnParams *lptr )
{
   int itry, n_escape, n_retry, bad_count, new_record, refined ;
   long seed ;
   double err, prev_err, best_err, start_of_loop_error, best_inner_error ;
   double initial_accuracy, final_accuracy ;
   char msg[80] ;
   LayerNet *worknet, *worknet2, *bestnet ;

   n_escape = n_retry = 0 ;

/*
   Allocate scratch memory
*/

   MEMTEXT ( "ANX_DD::learn new worknet, bestnet" ) ;
   worknet = new LayerNet ( model , outmod , outlin , nin , nhid1 , nhid2 ,
                            nout , 0 , 0 ) ;
   bestnet = new LayerNet ( model , outmod , outlin , nin , nhid1 , nhid2 ,
                            nout , 0 , 1 ) ;

   if ((worknet == NULL)  ||  (! worknet->ok)
    || (bestnet == NULL)  ||  (! bestnet->ok)) {
      memory_message ( "to learn" ) ;
      if (worknet != NULL)
         delete worknet ;
      if (bestnet != NULL)
         delete bestnet ;
      errtype = 0 ;
      return ;
      }

   if ((lptr->method == METHOD_AN2_CJ)  ||  (lptr->method == METHOD_AN2_LM)) {
      worknet2 = new LayerNet ( model , outmod , outlin , nin , nhid1 , nhid2 ,
                                nout , 0 , 0 ) ;
      if ((worknet2 == NULL)  ||  (! worknet2->ok)) {
         if (worknet2 != NULL)
            delete worknet2 ;
         delete worknet ;
         delete bestnet ;
         memory_message ( "to learn" ) ;
         errtype = 0 ;
         return ;
         }
      }
   else
      worknet2 = NULL ;

/*
   Start by annealing around the starting weights.  These will be zero if the
   net was just created.  If it was restored or partially trained already,
   they will be meaningful.  Anneal1 guarantees that it will not return all
   zero weights if there is at least one hidden layer, even if that means
   that the error exceeds the amount that could be attained by all zeros.
*/

   best_err = best_inner_error = 1.e30 ;
   if ((lptr->method == METHOD_AN1_CJ)  ||  (lptr->method == METHOD_AN1_LM))
      anneal1 ( tptr , lptr , worknet , 1 , 0 ) ;
   else if ((lptr->method == METHOD_AN2_CJ) || (lptr->method == METHOD_AN2_LM))
      anneal2 ( tptr , lptr , worknet , worknet2 , 1 ) ;

/*
   Do direct descent optimization, finding local minimum.
   Then anneal to break out of it.  If successful, loop back up to
   do direct descent again.  Otherwise restart totally random.
*/

   bad_count = 0 ;         // Handles flat local mins
   refined = 0 ;           // Did we ever refine to high resolution?  Not yet.
   new_record = 0 ;        // Refine every time a new inner error record is set
   initial_accuracy = pow ( 10.0 , -lptr->cj_acc ) ;
   final_accuracy = initial_accuracy * pow ( 10.0 , -lptr->cj_refine ) ;

   for (itry=1 ; ; itry++) {

      if (neterr < best_err) {   // Keep track of best
         copy_weights ( bestnet , this ) ;
         best_err = neterr ;
         }

      sprintf ( msg , "Try %d  (best=%lf):", itry, best_err ) ;
      normal_message ( msg ) ;

      if (neterr <= lptr->quit_err)
         break ;

      start_of_loop_error = neterr ;
      if ((lptr->method == METHOD_AN1_CJ)  ||  (lptr->method == METHOD_AN2_CJ))
         err = conjgrad ( tptr , 32767 , initial_accuracy ,
                          lptr->quit_err , lptr->cj_progress ) ;
      else if ((lptr->method==METHOD_AN1_LM) || (lptr->method==METHOD_AN2_LM))
         err = lev_marq ( tptr , 32767 , initial_accuracy ,
                          lptr->quit_err , lptr->cj_progress ) ;
      neterr = fabs ( err ) ; // err<0 if user pressed ESCape

      sprintf ( msg , "  Gradient err=%lf", neterr ) ;
      progress_message ( msg ) ;

      if (neterr < best_err) {   // Keep track of best
         copy_weights ( bestnet , this ) ;
         best_err = neterr ;
         }

      if (err <= lptr->quit_err) { // err<0 if user pressed ESCape
         if (err < -1.e29)         // or insufficient memory
            printf ( "\nInsufficient memory for gradient learning." ) ;
         break ;
         }

      seed = flrand() - (long) (itry * 97) ;   // Ensure new seed for anneal
      sflrand ( seed ) ;

      prev_err = neterr ;  // So we can see if anneal helped

      if ((lptr->method == METHOD_AN1_CJ)  ||  (lptr->method == METHOD_AN1_LM))
         anneal1 ( tptr , lptr , worknet , 0 , itry ) ;
      else if ((lptr->method==METHOD_AN2_CJ) || (lptr->method==METHOD_AN2_LM))
         anneal2 ( tptr , lptr , worknet , worknet2 , 0 ) ;

      sprintf ( msg , "  Anneal err=%lf", neterr ) ;
      progress_message ( msg ) ;

      if (neterr < best_err) {  // Keep track of best
         copy_weights ( bestnet , this ) ;
         best_err = neterr ;
         }

      if (best_err <= lptr->quit_err)
         break ;

      if (neterr < best_inner_error) {  // Keep track of best inner for refine
         best_inner_error = neterr ;
         new_record = 1 ;               // Tells us to refine
         }

      if ((prev_err - neterr) > 1.e-7) { // Did we break out of local min?
         if ((start_of_loop_error - neterr) < 1.e-3)
            ++bad_count ;  // Avoid many unprofitable iters
         else
            bad_count = 0 ;
         if (bad_count < 4) {
            ++n_escape ;          // Count escapes from local min
            continue ;            // Escaped, so gradient learn again
            }
         }

/*
   After the first few tries, and after each improvement thereafter, refine
   to high resolution
*/

      if ((itry-n_escape >= lptr->cj_pretries)  &&  (new_record || ! refined)) {
         if (! refined) {   // If refining the best of the pretries
            copy_weights ( this , bestnet ) ;  // Get that net
            neterr = best_err ;
            }
         refined = 1 ;     // Only force refine once
         new_record = 0 ;  // Reset new inner error record flag
         progress_message ( "  REFINING" ) ;
         if ((lptr->method == METHOD_AN1_CJ) || (lptr->method == METHOD_AN2_CJ))
            err = conjgrad ( tptr , 0 , final_accuracy ,
                             lptr->quit_err , lptr->cj_progress ) ;
         else if ((lptr->method==METHOD_AN1_LM)|| (lptr->method==METHOD_AN2_LM))
            err = lev_marq ( tptr , 0 , final_accuracy ,
                             lptr->quit_err , lptr->cj_progress ) ;
         neterr = fabs ( err ) ; // err<0 if user pressed ESCape
         sprintf ( msg , "  Attained err=%lf", neterr ) ;
         progress_message ( msg ) ;
         if (neterr < best_err) {  // Keep track of best
            copy_weights ( bestnet , this ) ;
            best_err = neterr ;
            }
         }

      if (++n_retry > lptr->retries)
         break ;

      progress_message ( "  RESTART" ) ;
      zero_weights () ;  // Failed to break out, so retry random
      seed = flrand() - (long) (itry * 773) ;   // Ensure new seed for anneal
      sflrand ( seed ) ;
      if ((lptr->method == METHOD_AN1_CJ)  ||  (lptr->method == METHOD_AN1_LM))
         anneal1 ( tptr , lptr , worknet , 1 , itry ) ;
      else if ((lptr->method==METHOD_AN2_CJ) || (lptr->method==METHOD_AN2_LM))
         anneal2 ( tptr , lptr , worknet , worknet2 , 1 ) ;
      }

FINISH:
   copy_weights ( this , bestnet ) ;
   neterr = best_err ;
   MEMTEXT ( "ANX_DD::learn delete worknet, bestnet" ) ;
   delete worknet ;
   delete bestnet ;
   if (worknet2 != NULL) {   // Allocated only for the AN2 methods
      MEMTEXT ( "ANX_DD::learn delete worknet2" ) ;
      delete worknet2 ;
      }
   sprintf ( msg , "%d successful escapes, %d retries", n_escape, n_retry ) ;
   normal_message ( msg ) ;

   return ;
}
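
The two accuracy values used above form a two-stage convergence tolerance: 10 raised to -cj_acc for the routine passes, with cj_refine additional decimal digits demanded during the one-time refinement.  A tiny sketch with assumed parameter values (cj_acc and cj_refine are user settings in LearnParams, not fixed constants):

#include <cstdio>
#include <cmath>

int main ()
{
   // Assumed user settings for illustration only
   double cj_acc = 6.0, cj_refine = 2.0 ;

   double initial_accuracy = pow ( 10.0 , -cj_acc ) ;                     // 1.e-6
   double final_accuracy = initial_accuracy * pow ( 10.0 , -cj_refine ) ; // 1.e-8

   printf ( "initial=%g  final=%g\n", initial_accuracy, final_accuracy ) ;
   return 0 ;
}
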
Example #4
//*******************************************************************
// WinMain - Neural main
//
// parameters:
//             hInstance     - Handle of this instance of the
//                             application.
//             hPrevInstance - Handle of the previous instance of
//                             the application. This will be 0 if
//                             this is the first instance.
//             lpszCmdLine   - A long pointer to the command line that
//                             started this application.
//             cmdShow       - Indicates how the window is to be shown
//                             initially, i.e. SW_SHOWNORMAL, SW_HIDE,
//                             SW_MINIMIZE.
//
// returns:
//             wParam from last message.
//
//*******************************************************************
int PASCAL WinMain(HINSTANCE hInstance, HINSTANCE hPrevInstance,
						 LPSTR lpszCmdLine, int cmdShow)
{


/*
	Declarations of local variables
*/

	int control_file_number = -1 ;           // Stack pointer for control files
	FILE *control_files[MAX_CONTROL_FILES] ; // This is the stack

	char *control_line ;    // User's commands here
	char *command, *rest ;  // Pointers to its command and parameter parts
	int n_command, n_rest ; // Lengths of those parts

	int net_model = -1 ;     // Network model (see NETMOD_? in CONST.H)
	int out_model = -1 ;     // Output model (see OUTMOD_? in CONST.H)
	int n_inputs = -1 ;      // Number of input neurons
	int n_outputs = -1 ;     // Number of output neurons
	int n_hidden1 = -1 ;     // Number of hidden layer one neurons
	int n_hidden2 = -1 ;     // Ditto layer 2 (0 if just one hidden layer)


	TrainingSet *tset = NULL ;            // Training set here
	Network *network = NULL ;             // Network here
	struct LearnParams learn_params ;     // General learning parameters
	struct AnnealParams anneal_params ;   // Simulated annealing parameters
	struct GenInitParams geninit_params ; // Genetic initialization parameters
	struct KohParams koh_params ;         // Kohonen parameters

	int classif_output = -1 ;  // Current class (0=reject) for classif training
	char out_file[80] = "" ;   // File for EXECUTE output
	float threshold = 0.0 ;    // CLASSIFY confusion reject cutoff
	char resp_file[80]="";     // File for initializing output neurons' names
	char train_file[80]="";    // File most recently used for TRAIN
/*
	Miscellaneous variables
*/

	int i, n, m ;
	float p ;
	char *msg ;
	FILE *fp ;
	unsigned long me,mc;
	char fname[80] = "" ;   // Control file named on the command line, if any
	char *control;

#if VERSION_16_BIT
	if (sizeof(int) > 2) {
		printf ( "\nRecompile with VERSION_16_BIT set to 0 in CONST.H" ) ;
		exit ( 1 ) ;
		}
#else
	if (sizeof(int) < 4) {
		printf ( "\nRecompile with VERSION_16_BIT set to 1 in CONST.H" ) ;
		exit ( 1 ) ;
		}
#endif


printf ( "\nNEURAL SYSTEM - Program to train and test neural networks" ) ;

   if (lpszCmdLine != NULL  &&  lpszCmdLine[0])   // Control file may be named on the command line
      strcpy ( fname , lpszCmdLine ) ;


/*
   Process command line parameters
*/

   mem_name[0] = 0 ;  // Default is no memory allocation file
 /*
   if (strlen ( mem_name )) {
      strcat ( mem_name , ":mem.log" ) ;
      fp = fopen ( mem_name , "wt" ) ;
      if (fp == NULL) {
	 printf ( "\nCannot open debugging file %s", mem_name ) ;
	 exit ( 1 ) ;
	 }
      fclose ( fp ) ;
      mem_log = 1 ;
      }
   else
      mem_log = 0 ;
   */
   mem_log  = 0 ;
   mem_used = 0 ;
/*
   Initialize defaults
*/

   learn_params.init = -1 ;
   learn_params.quit_err = 0.0 ;
   learn_params.retries = 32767 ;

   anneal_params.temps0 = 3 ;
   anneal_params.temps = 4 ;
   anneal_params.iters0 = 50 ;
   anneal_params.iters = 20 ;
   anneal_params.setback0 = 50 ;
   anneal_params.setback = 20 ;
   anneal_params.start0 = 3.0 ;
   anneal_params.start = 4.0 ;
   anneal_params.stop0 = 1.0 ;
   anneal_params.stop = 0.02 ;

   geninit_params.pool = 50 ;
   geninit_params.gens = 3 ;
   geninit_params.climb = 0 ;
   geninit_params.overinit = 1.5 ;
   geninit_params.pcross = 0.8 ;
   geninit_params.pmutate = 0.0001 ;

   koh_params.normalization = 0 ;  // 0=multiplicative, 1=Z
   koh_params.learn_method = 1 ;   // 0=additive, 1=subtractive
   koh_params.rate = 0.4 ;         // learning rate
   koh_params.reduction = 0.99 ;   // learning rate reduction

   learn_params.ap = &anneal_params ;
   learn_params.gp = &geninit_params ;
   learn_params.kp = &koh_params ;


   act_func_init () ; // Initialize interpolation table for activation function

   MEMTEXT ( "NEURAL: control_line, msg" ) ;
   if (((control_line = (char *) MALLOC ( CONTROL_LINE_LENGTH+1 )) == NULL)
    || ((msg = (char *) MALLOC ( CONTROL_LINE_LENGTH+1 )) == NULL)) {
      printf ( "\nInsufficient memory" ) ;
      exit ( 1 ) ;
      }

/*
   Main loop processes all commands
*/

   for (;;) {
      if (fname[0]) {    // Command line named a control file; stack it once
	 strcpy ( control_line , "CONTROL:" ) ;
	 strcat ( control_line , fname ) ;
	 fname[0] = 0 ;   // Use the command line file only once
	 }
      else
	 get_control_line ( control_line , &control_file_number, control_files ) ;

      split_control_line ( control_line , &command , &n_command ,
			   &rest , &n_rest ) ;

      if (! n_command) {
	 if (n_rest) {
	    sprintf ( msg , "No colon after command: %s", rest ) ;
	    error_message ( msg ) ;
	    }
	 continue ;
	 }

      sprintf ( msg , "%s : %s", command, rest ) ;
      normal_message ( msg ) ;

/*
   Act on the command
*/

      if (! strcmp ( command , "QUIT" ))
	 break ;

      if (! strcmp ( command , "CONTROL" )) {
	 stack_control_file (rest, &control_file_number, control_files) ;
	 continue ;
	 }

      if (! strcmp ( command , "NETWORK MODEL" )) {
	 // Multi layer network
	 if (! strcmp ( rest , "LAYER" ))
	    n = NETMOD_LAYER ;
	 // Kohonen network
	 else if (! strcmp ( rest , "KOHONEN" ))
	    n = NETMOD_KOH ;
	 // Hopfield network
	 else if (! strcmp ( rest , "HOPFIELD" ))
	    n = NETMOD_HOP ;
	 // Bidirectional associative memory network
	 else if (! strcmp ( rest , "BAM" ))
	    n = NETMOD_BAM ;

	 else {
	    sprintf ( msg , "Illegal NETWORK MODEL: %s", rest ) ;
	    error_message ( msg ) ;
	    continue ;
	    }
	 if (net_model == n)
	    continue ;
	 if (ok_to_clear_weights( &network )) {
	    net_model = n ;
	    learn_params.init = -1 ;
	    }
	 else
	    warning_message ( "Command aborted" ) ;
	 continue ;
	 }

      if (! strcmp ( command , "OUTPUT MODEL" )) {
	 if (! strcmp ( rest , "CLASSIFY" ))
	    n = OUTMOD_CLASSIFY ;
	 else if (! strcmp ( rest , "AUTO" ))
	    n = OUTMOD_AUTO ;
	 else if (! strcmp ( rest , "GENERAL" ))
	    n = OUTMOD_GENERAL ;
	 else {
	    sprintf ( msg , "Illegal OUTPUT MODEL: %s", rest ) ;
	    error_message ( msg ) ;
	    continue ;
	    }
	 if (out_model == n)
	    continue ;
	 if ((ok_to_clear_tset( &tset )) && (ok_to_clear_weights( &network)))
	    out_model = n ;
	 else
	    warning_message ( "Command aborted" ) ;
	 continue ;
	 }

      if (! strcmp ( command , "N INPUTS" )) {
	 m = sscanf ( rest , "%d" , &n ) ;
	 if ((m <= 0)  ||  (n <= 0)  ||  (n > MAX_INPUTS)) {
	    sprintf ( msg , "Illegal N INPUTS: %s", rest ) ;
	    error_message ( msg ) ;
	    continue ;
	    }
	 if (n_inputs == n)
	    continue ;
	 if ((ok_to_clear_tset( &tset)) && (ok_to_clear_weights(&network)))
	    n_inputs = n ;
	 else
	    warning_message ( "Command aborted" ) ;
	 continue ;
	 }

      if (! strcmp ( command , "N OUTPUTS" )) {
	 m = sscanf ( rest , "%d" , &n ) ;
	 if ((m <= 0)  ||  (n <= 0)  ||  (n > MAX_OUTPUTS)) {
	    sprintf ( msg , "Illegal N OUTPUTS: %s", rest ) ;
	    error_message ( msg ) ;
	    continue ;
	    }
	 if (n_outputs == n)
	    continue ;
	 if ((ok_to_clear_tset( &tset)) && (ok_to_clear_weights(&network)))
	    n_outputs = n ;
	 else
	    warning_message ( "Command aborted" ) ;
	 continue ;
	 }

      if (! strcmp ( command , "N HIDDEN1" )) {
	 m = sscanf ( rest , "%d" , &n ) ;
	 if ((m <= 0)  ||  (n < 0)  ||  (n > MAX_HIDDEN)) {
	    sprintf ( msg , "Illegal N HIDDEN1: %s", rest ) ;
	    error_message ( msg ) ;
	    continue ;
	    }
	 if (n_hidden1 == n)
	    continue ;
	 if (ok_to_clear_weights( &network ))
	    n_hidden1 = n ;
	 else
	    warning_message ( "Command aborted" ) ;
	 continue ;
	 }

      if (! strcmp ( command , "N HIDDEN2" )) {
	 m = sscanf ( rest , "%d" , &n ) ;
	 if ((m <= 0)  ||  (n < 0)  ||  (n > MAX_HIDDEN)) {
	    sprintf ( msg , "Illegal N HIDDEN2: %s", rest ) ;
	    error_message ( msg ) ;
	    continue ;
	    }
	 if (n  &&  ! n_hidden1) {
	    error_message ( "N HIDDEN2 must be 0 if N HIDDEN1 is 0." ) ;
	    continue ;
	    }
	 if (n_hidden2 == n)
	    continue ;
	 if (ok_to_clear_weights( &network ))
	    n_hidden2 = n ;
	 else
	    warning_message ( "Command aborted" ) ;
	 continue ;
	 }

      if (! strcmp ( command , "TRAIN" )) {
	 if ((out_model == OUTMOD_AUTO)  &&  (n_outputs != n_inputs)) {
	    warning_message ( "Setting N OUTPUTS = N INPUTS" ) ;
	    n_outputs = n_inputs ;
	    }
	 if (out_model <= 0)
	    error_message ( "TRAIN used before OUTPUT MODEL set." ) ;
	 else if (n_inputs <= 0)
	    error_message ( "TRAIN used before N INPUTS set." ) ;
	 else if (n_outputs <= 0)
	    error_message ( "TRAIN used before N OUTPUTS set." ) ;
	 else if ((net_model == NETMOD_HOP) && (n_inputs != n_outputs))
	    error_message("HOPFIELD network requires INPUTS = OUTPUTS.");
	 else if ((net_model == NETMOD_BAM) && (out_model != OUTMOD_GENERAL))
	    error_message("BAM network requires GENERAL output mode.");
	 else if ((net_model == NETMOD_HOP) && (out_model != OUTMOD_AUTO))
	    error_message("HOPFIELD network requires AUTO output mode.");
	 else if ((net_model != NETMOD_KOH) && (out_model == OUTMOD_CLASSIFY)
		  &&  (classif_output < 0))
	    error_message( "CLASSIFY output mode but CLASSIFY OUTPUT not set.");
	 else if ((net_model == NETMOD_KOH)  &&  (out_model != OUTMOD_CLASSIFY))
	    error_message( "KOHONEN network requires CLASSIFY output mode.");
	 else {
	    if (tset == NULL) {
	       MEMTEXT ( "NEURAL: new tset" ) ;
	       tset = new TrainingSet ( out_model , n_inputs , n_outputs ) ;
	       }
	    tset->train ( rest , classif_output ) ;
	    strcpy(train_file,rest);
	 }
	 continue ;
	 }

      if (check_anneal ( command , rest , &anneal_params ))
	 continue ;

      if (check_genetic ( command , rest , &geninit_params ))
	 continue ;

      if (check_kohonen ( command , rest , &koh_params , &network ))
	 continue ;

      if (check_learn_params ( command , rest , &learn_params , net_model ))
	 continue ;

      if (! strcmp ( command , "LEARN" )) {
	 if ((tset == NULL)  ||  (tset->ntrain == 0)) {
	    error_message ( "Cannot LEARN; No training set exists." ) ;
	    continue ;
	    }
	 if ((net_model == NETMOD_KOH)  &&  (out_model != OUTMOD_CLASSIFY)) {
	    error_message( "KOHONEN network requires CLASSIFY output mode.");
	    continue ;
	    }
	 if (learn_params.init < 0) {
	    error_message( "Initialization method not set.");
	    continue ;
	    }
	 if (network == NULL)
	 {
	    if (net_model == NETMOD_LAYER)
	    {
	       if (n_hidden1 < 0)
	       {
		  error_message ( "LEARN used before N HIDDEN1 set." ) ;
		  continue ;
	       }
	       else if (n_hidden2 < 0)
	       {
		  error_message ( "LEARN used before N HIDDEN2 set." ) ;
		  continue ;
	       }
	       else
	       {
		  MEMTEXT ( "NEURAL: new LayerNet" ) ;
		  network = new LayerNet ( out_model , n_inputs , n_hidden1 ,
					   n_hidden2 , n_outputs , 1 , 1 ) ;
	       }
	    }
	    else if (net_model == NETMOD_KOH)
	    {
	       MEMTEXT ( "NEURAL: new KohNet" ) ;
	       network = new KohNet ( n_inputs , n_outputs ,
				      &koh_params , 1 , 1 ) ;
	    }
	    else if (net_model == NETMOD_HOP)
	    {

	       MEMTEXT ( "NEURAL: new HopNet" );
	       network = new HopNet (n_inputs,n_outputs, 1,1);
	    }

	    else if (net_model == NETMOD_BAM)
	    {
	       MEMTEXT ("NEURAL: new BamNet");
	       network = new LayerNet ( out_model , n_inputs , n_hidden1 ,
					n_hidden2 , n_outputs , 1 , 1 ) ;

	    }
	 }
	 if ((network == NULL)  ||  (! network->ok)) {  // Malloc failure?
	    memory_message ( "to create network." ) ;
	    if (network != NULL) {
	       delete network ;
	       network = NULL ;
	       }
	    continue ;
	    }
	 normal_message("Learning...\n");
	 network->learn ( tset , &learn_params ) ;
	 normal_message("End of Learning\n");
	 if (network->neterr > 0.999999) {  // Indicates massive failure
	    MEMTEXT ( "NEURAL: learn failure delete network" ) ;
	    delete network ;
	    network = NULL ;
	    }
	 else {
	    sprintf ( msg , "Final error = %.4lf%% of max possible",
		      100.0 * network->neterr ) ;
	    normal_message ( msg ) ;
	    }
	 continue ;
	 }

      if (! strcmp ( command , "SAVE WEIGHTS" )) {
	 if (network == NULL)
	    error_message ( "There are no learned weights to save." ) ;
	 else
	    wt_save ( network , net_model , 0 , rest ) ;
	 continue ;
	 }

      if (! strcmp ( command , "RESTORE WEIGHTS" )) {
	 if (network != NULL) {
	    MEMTEXT ( "NEURAL: delete network for restore" ) ;
	    delete network ;
	    network = NULL ;
	    }
	 network = wt_restore ( rest , &net_model ) ;
	 if (network == NULL)
	    continue ;
	 if (tset != NULL) {
	    if ((tset->nin != network->nin)
	     || (tset->nout != network->nout)
	     || (tset->outmod != network->outmod)) {
	       error_message ( "Network conflicts with existing training set.");
	       continue ;
	       }
	    }
	 out_model = network->outmod ;
	 n_inputs = network->nin ;
	 n_outputs = network->nout ;
	 if (net_model == NETMOD_LAYER) {
	    n_hidden1 = ((LayerNet*) network)->nhid1 ;
	    n_hidden2 = ((LayerNet*) network)->nhid2 ;
	    }
	 if (net_model == NETMOD_KOH)
	    koh_params.normalization = ((KohNet *) network)->normalization ;
	 learn_params.init = -1 ;
	 continue ;
	 }

      if (! strcmp ( command , "CLEAR TRAINING" )) {
	 if (tset != NULL) {
	    MEMTEXT ( "NEURAL: delete tset" ) ;
	    delete tset ;
	    tset = NULL ;
	    }
	 continue ;
	 }

      if (! strcmp ( command , "CLEAR WEIGHTS" )) {
	 if (network != NULL) {
	    MEMTEXT ( "NEURAL: delete network" ) ;
	    delete network ;
	    network = NULL ;
	    }
	 continue ;
	 }

      if (! strcmp ( command , "CLASSIFY OUTPUT" )) {
	 if (net_model == NETMOD_KOH) {
	    error_message ( "Cannot specify output for KOHONEN model." ) ;
	    continue ;
	    }
	 if (n_outputs < 0) {
	    error_message ( "CLASSIFY OUTPUT used before N OUTPUTS set." ) ;
	    continue ;
	    }
	 if (out_model != OUTMOD_CLASSIFY) {
	    error_message
		  ( "CLASSIFY OUTPUT only valid when OUTPUT MODEL:CLASSIFY" ) ;
	    continue ;
	    }
	 m = sscanf ( rest , "%d" , &n ) ;
	 if ((m <= 0)  ||  (n < 0)) {
	    sprintf ( msg , "Illegal CLASSIFY OUTPUT: %s", rest ) ;
	    error_message ( msg ) ;
	    }
	 else if (n > n_outputs) {
	    sprintf ( msg , "CLASSIFY OUTPUT (%d) exceeds N OUTPUTS (%d)",
		      n, n_outputs ) ;
	    error_message ( msg ) ;
	    }
	 else
	    classif_output = n ;
	 continue ;
	 }

      if (! strcmp ( command , "OUTPUT FILE" )) {
	 strcpy ( out_file , rest ) ;
	 continue ;
	 }

      if (! strcmp ( command , "EXECUTE" ))
      {
	 if (network == NULL)
	    error_message ( "There is no trained network" ) ;
	 else
	 {
	    network->execute_from_file ( rest , out_file) ;
	    continue ;
	 }
      }

      if (! strcmp ( command , "TEST NETWORK" ))
      {
	 if (network == NULL)
	    error_message ( "There is no trained network" ) ;
	 else
	 {
	    network->test_from_file ( rest ,out_file,net_model) ;
	    continue ;
	 }
      }

      if (! strcmp ( command , "CLASSIFY" )) {
	 if (network == NULL)
	    error_message ( "There is no trained network" ) ;
	 else if (out_model != OUTMOD_CLASSIFY)
	    error_message ( "CLASSIFY valid only in CLASSIFY output mode" ) ;
	 else
	    network->classify_from_file ( rest , threshold ) ;
	 continue ;
	 }

      if (! strcmp ( command , "RESET CONFUSION" )) {
         if (network == NULL)
            error_message ( "There is no trained network" ) ;
         else
            network->reset_confusion () ;
         continue ;
         }

      if (! strcmp ( command , "CONFUSION THRESHOLD" )) {
	 p = atof ( rest ) ;
	 if ((p < 0.0)  ||  (p > 100.0)) {
	    sprintf ( msg , "Illegal CONFUSION THRESHOLD: %s", rest ) ;
            error_message ( msg ) ;
            }
	 else
            threshold = p / 100.0 ;
         continue ;
         }

      if (! strcmp ( command , "SHOW CONFUSION" )) {
         if (network == NULL)
            error_message ( "There is no trained network" ) ;
         else if (out_model != OUTMOD_CLASSIFY)
	    error_message ( "CONFUSION valid only in CLASSIFY output mode" ) ;
         else
            network->show_confusion () ;
         continue ;
	 }

      if (! strcmp ( command , "SAVE CONFUSION" )) {
         if (network == NULL)
            error_message ( "There is no trained network" ) ;
         else if (out_model != OUTMOD_CLASSIFY)
            error_message ( "CONFUSION valid only in CLASSIFY output mode" ) ;
         else
            network->save_confusion ( rest ) ;
	 continue ;
         }

      sprintf ( msg , "Unknown command: %s", command ) ;
      error_message ( msg ) ;

      } // Endless command loop

   MEMTEXT ( "NEURAL: control_line, msg" ) ;
   FREE ( control_line ) ;
   FREE ( msg ) ;
   MEMCLOSE () ;
   exit ( 0 ) ;
}
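
Every control line has the form COMMAND:parameter, and split_control_line separates the two parts before the dispatch chain above runs.  The real split_control_line is not listed in this excerpt; the sketch below is one plausible way to obtain the command/rest/n_command/n_rest behavior the loop relies on (case conversion and other details of the real routine may differ):

#include <cstdio>
#include <cstring>

// Illustrative splitter: break "COMMAND : parameter" at the first colon and trim blanks
static void split_line ( char *line , char **command , int *n_command ,
                         char **rest , int *n_rest )
{
   char *colon = strchr ( line , ':' ) ;

   if (colon == NULL) {          // No colon: no command, whole line is "rest"
      *command = line ;
      *n_command = 0 ;
      *rest = line ;
      *n_rest = (int) strlen ( line ) ;
      return ;
      }

   *colon = 0 ;                  // Terminate the command part
   *command = line ;
   *rest = colon + 1 ;

   while (**rest == ' ')         // Skip blanks before the parameter
      ++*rest ;

   char *end = line + strlen ( line ) ;   // Trim trailing blanks off the command
   while (end > line  &&  end[-1] == ' ')
      *--end = 0 ;

   *n_command = (int) strlen ( *command ) ;
   *n_rest = (int) strlen ( *rest ) ;
}

int main ()
{
   char line[] = "NETWORK MODEL : LAYER" ;
   char *command, *rest ;
   int n_command, n_rest ;

   split_line ( line , &command , &n_command , &rest , &n_rest ) ;
   printf ( "command=[%s]  rest=[%s]\n", command, rest ) ;  // [NETWORK MODEL]  [LAYER]
   return 0 ;
}

A line containing no colon comes back with n_command equal to zero, which is what triggers the "No colon after command" branch in the main loop.
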
Example #5
void LayerNet::ssg (
   TrainingSet *tptr ,        // Training set to use
   struct LearnParams *lptr , // User's general learning parameters
   int use_grad               // SS if zero, else SSG
   )
{
   int itry, user_quit, n, n_grad ;
   long seed ;
   double best_err, *work1, *work2, *grad, *avg_grad ;
   char msg[80] ;
   LayerNet *worknet1, *worknet2, *bestnet ;
                             
/*
   Allocate network scratch memory
*/

   MEMTEXT ( "SSG::new 2 worknets, bestnet" ) ;
   worknet1 = new LayerNet ( model , outmod , outlin , nin , nhid1 , nhid2 ,
                             nout , 0 , 0 ) ;
   worknet2 = new LayerNet ( model , outmod , outlin , nin , nhid1 , nhid2 ,
                             nout , 0 , 0 ) ;
   bestnet = new LayerNet ( model , outmod , outlin , nin , nhid1 , nhid2 ,
                            nout , 0 , 1 ) ;

   if ((worknet1 == NULL)  ||  (! worknet1->ok)
    || (worknet2 == NULL)  ||  (! worknet2->ok)
    || (bestnet == NULL)  ||  (! bestnet->ok)) {
      memory_message ( "to learn" ) ;
      if (worknet1 != NULL)
         delete worknet1 ;
      if (worknet2 != NULL)
         delete worknet2 ;
      if (bestnet != NULL)
         delete bestnet ;
      errtype = 0 ;
      return ;
      }

/*
   Allocate gradient work memory.
   Work1 is used for hidden layer 2 deltas in REAL model, and output
   activation partial derivatives and deltas in all COMPLEX models.
   Work2 is output deltas in REAL model, error difference in COMPLEX models.
*/

   if (use_grad) {
      if (nhid2)       // Must be REAL model if this is true
         n = nhid2 ;
      else if (model == NETMOD_COMPLEX_INPUT)
         n = nhid1  ?  nout * 2 + nhid1 * 2  :  nout * 2 ;
      else if (model == NETMOD_COMPLEX_HIDDEN)
         n = nout * 4  +  nhid1 * 4 ;
      else if (model == NETMOD_COMPLEX)
         n = nhid1  ?  nout * 6  +  nhid1 * 4  :  nout * 4 ;
      else
         n = 0 ;

      if (n) {
         MEMTEXT ( "SSG::work1" ) ;
         work1 = (double *) MALLOC ( n * sizeof(double) ) ;
         if (work1 == NULL) {
            memory_message ( "to learn" ) ;
            delete worknet1 ;
            delete worknet2 ;
            delete bestnet ;
            errtype = 0 ;
            return ;
            }
         }
      else
         work1 = NULL ;

      if (nhid1 == 0)               // No hidden layer
         n_grad = nout * nin_n ;
      else if (nhid2 == 0)          // One hidden layer
         n_grad = nhid1 * nin_n + nout * nhid1_n ;
      else                          // Two hidden layers
         n_grad = nhid1 * nin_n + nhid2 * nhid1_n + nout * nhid2_n ;

      MEMTEXT ( "SSG::3 work vectors" ) ;
      work2 = (double *) MALLOC ( nout_n * sizeof(double) ) ;
      grad = (double *) MALLOC ( n_grad * sizeof(double) ) ;
      avg_grad = (double *) MALLOC ( n_grad * sizeof(double) ) ;

      if ((work2 == NULL)  ||  (grad == NULL)  ||  (avg_grad == NULL)) {
         if (work1 != NULL)
            FREE ( work1 ) ;
         if (work2 != NULL)
            FREE ( work2 ) ;
         if (grad != NULL)
            FREE ( grad ) ;
         if (avg_grad != NULL)
            FREE ( avg_grad ) ;
         memory_message ( "to learn" ) ;
         delete worknet1 ;
         delete worknet2 ;
         delete bestnet ;
         errtype = 0 ;
         return ;
         }
      }
   else
      work1 = work2 = grad = avg_grad = NULL ;

   best_err = 1.e30 ;
   for (itry=1 ; itry<=lptr->retries+1 ; itry++) {

      user_quit = ssg_core ( tptr , lptr , worknet1 , worknet2 ,
                             work1 , work2 , grad , avg_grad , n_grad ) ;

      if (neterr < best_err) {
         best_err = neterr ;
         copy_weights ( bestnet , this ) ;
         }

      sprintf ( msg , "Try %d  err=%lf  best=%lf", itry, neterr, best_err ) ;
      normal_message ( msg ) ;

      if (user_quit  ||  (neterr < lptr->quit_err))
         break ;

      seed = flrand() - (long) (itry * 97) ;   // Ensure new seed for anneal
      sflrand ( seed ) ;
      zero_weights () ;  // Retry random
      }

   copy_weights ( this , bestnet ) ;
   neterr = best_err ;

   MEMTEXT ( "SSG::delete 2 worknets, bestnet" ) ;
   delete worknet1 ;
   delete worknet2 ;
   delete bestnet ;

   if (use_grad) {
      if (work1 != NULL) {
         MEMTEXT ( "SSG::work1" ) ;
         FREE ( work1 ) ;
         }
      MEMTEXT ( "SSG::3 work vectors" ) ;
      FREE ( work2 ) ;
      FREE ( grad ) ;
      FREE ( avg_grad) ;
      }

   return ;

}
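
The length of the gradient work vector depends only on the topology: each neuron contributes one weight per input from the previous layer plus a bias term.  A quick sketch of the same arithmetic for an assumed REAL-model net, where the _n sizes are simply the layer size plus one:

#include <cstdio>

int main ()
{
   // Illustrative topology; in the REAL model nin_n = nin+1 etc. (one bias term added)
   int nin = 10, nhid1 = 5, nhid2 = 0, nout = 3 ;
   int nin_n = nin + 1, nhid1_n = nhid1 + 1, nhid2_n = nhid2 + 1 ;

   int n_grad ;
   if (nhid1 == 0)               // No hidden layer
      n_grad = nout * nin_n ;
   else if (nhid2 == 0)          // One hidden layer
      n_grad = nhid1 * nin_n + nout * nhid1_n ;
   else                          // Two hidden layers
      n_grad = nhid1 * nin_n + nhid2 * nhid1_n + nout * nhid2_n ;

   printf ( "gradient length = %d\n", n_grad ) ;   // 5*11 + 3*6 = 73
   return 0 ;
}
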
Example #6
int LayerNet::ssg_core (
   TrainingSet *tptr ,        // Training set to use
   struct LearnParams *lptr , // User's general learning parameters
   LayerNet *avgnet ,         // Work area used to keep average weights
   LayerNet *bestnet ,        // And the best so far
   double *work1 ,            // Gradient work vector
   double *work2 ,            // Ditto
   double *grad ,             // Ditto
   double *avg_grad ,         // Ditto
   int n_grad                 // Length of above vectors
   )
{
   int ntemps, niters, setback, reg, nvars, user_quit ;
   int i, iter, itemp, n_good, n_bad, use_grad ;
   char msg[80] ;
   double tempmult, temp, fval, bestfval, starttemp, stoptemp, fquit ;
   double avg_func, new_fac, gradlen, grad_weight, weight_used ;
   enum RandomDensity density ;
   SingularValueDecomp *sptr ;
   struct AnnealParams *aptr ; // User's annealing parameters

   aptr = lptr->ap ;

   ntemps = aptr->temps0 ;
   niters = aptr->iters0 ;
   setback = aptr->setback0 ;
   starttemp = aptr->start0 ;
   stoptemp = aptr->stop0 ;
   if (aptr->random0 == ANNEAL_GAUSSIAN)
      density = NormalDensity ;
   else if (aptr->random0 == ANNEAL_CAUCHY)
      density = CauchyDensity ;

   if (! (ntemps * niters))
      return 0 ;

/*
   Initialize other local parameters.  Note that there is no sense using
   regression if there are no hidden layers.
*/

   use_grad = (grad != NULL) ;
   fquit = lptr->quit_err ;
   reg = nhid1 ;

/*
   Allocate the singular value decomposition object for REGRESS.
   Also allocate a work area for REGRESS to preserve matrix.
*/

   if (reg) {                 // False if no hidden layers
      if (nhid2 == 0)         // One hidden layer
         nvars = nhid1_n ;
      else                    // Two hidden layers
         nvars = nhid2_n ;

      i = (model == NETMOD_COMPLEX)  ?  2 * tptr->ntrain : tptr->ntrain ;

      if (i < nvars) {
         warning_message ( "Too few training sets for regression." ) ;
         reg = 0 ;
         }
      else {
         MEMTEXT ( "SSG: new SingularValueDecomp" ) ;
         sptr = new SingularValueDecomp ( i , nvars , 1 ) ;

         if ((sptr == NULL)  || ! sptr->ok) {
            memory_message (
               "for SS(G) with regression.  Using total randomization.");
            if (sptr != NULL)
               delete sptr ;
            reg = 0 ;
            }
         }
      }

/*
   For the basic algorithm, we will keep the current 'average' network
   weight set in avgnet.  This will be the moving center about which the
   perturbation is done.
   Although not directly related to the algorithm itself, we keep track
   of the best network ever found in bestnet.  That is what the user
   will get at the end.
*/

   copy_weights ( bestnet , this ) ; // Current weights are best so far
   copy_weights ( avgnet , this ) ;  // Center of perturbation
   bestfval = trial_error ( tptr ) ;

/*
   If this is being used to initialize the weights, make sure that they are
   not identically zero.  Do this by setting bestfval huge so that
   SOMETHING is accepted later.
*/

   if (nhid1) {
      i = nhid1 * nin_n ;
      while (i--) {
         if (fabs(hid1_coefs[i]) > 1.e-10)
            break ;
         }
      if (i < 0)
         bestfval = 1.e30 ;
      }

/*
   Initialize by cumulating a bunch of points
*/

   normal_message ( "Initializing..." ) ;
   avg_func = 0.0 ;                       // Mean function around center
   if (use_grad) {
      for (i=0 ; i<n_grad ; i++)          // Zero the mean gradient
         avg_grad[i] = 0.0 ;
      }

   for (iter=0 ; iter<niters ; iter++) {  // Initializing iterations

      perturb ( avgnet , this , starttemp , reg , density ) ; // Move point

      if (reg)                            // If using regression, estimate
         fval = regress ( tptr , sptr ) ; // out weights now, ignore fval
      if (use_grad)                       // Also need gradient?
         fval = gradient ( tptr , work1 , work2 , grad ) ; // fval redundant
      else if (! reg)                     // If reg we got fval from regress
         fval = trial_error ( tptr ) ;

      avg_func += fval ;                  // Cumulate mean function

      if (use_grad) {                     // Also need gradient?
         for (i=0 ; i<n_grad ; i++)       // Cumulate mean gradient
            avg_grad[i] += grad[i] ;
         }

      if (fval < bestfval) {              // If this iteration improved
         bestfval = fval ;                // then update the best so far
         copy_weights ( bestnet , this ) ; // Keep the network
         if (bestfval <= fquit)           // If we reached the user's
            goto FINISH ;                 // limit, we can quit
         }

      if ((user_quit = user_pressed_escape ()) != 0)
         goto FINISH ;

      } // Loop: for all initial iters

   avg_func /= niters ;          // Mean of all points around avgnet
   new_fac = 1.0 / niters ;      // Weight of each point

   sprintf ( msg , "  avg=%.6lf  best=%.6lf", avg_func, bestfval ) ;
   progress_message ( msg ) ;

   if (use_grad) {               // Also need gradient?
      gradlen = 0.0 ;            // Will cumulate grad length
      for (i=0 ; i<n_grad ; i++) {  // Find gradient mean and length
         avg_grad[i] /= niters ;
         gradlen += avg_grad[i] * avg_grad[i] ;
         }
      gradlen = sqrt ( gradlen ) ;
      grad_weight = 0.5 ;
      }

/*
   This is the temperature reduction loop and the iteration within
   temperature loop.
*/

   temp = starttemp ;
   tempmult = exp( log( stoptemp / starttemp ) / (ntemps-1)) ;
   user_quit = 0 ;                           // Flags user pressed ESCape

   for (itemp=0 ; itemp<ntemps ; itemp++) {  // Temp reduction loop

      n_good = n_bad = 0 ;                   // Counts better and worse

      sprintf ( msg , "Temp=%.3lf ", temp ) ;
      normal_message ( msg ) ;

      for (iter=0 ; iter<niters ; iter++) {  // Iters per temp loop

         if ((n_bad >= 10)  &&
             ((double) n_good / (double) (n_good+n_bad)  <  0.15))
            break ;

         perturb ( avgnet , this , temp ,
                   reg , density ) ;         // Randomly perturb about center

         if (use_grad)                       // Bias per gradient?
            weight_used = shift ( grad , this , grad_weight , reg ) ;

         if (reg) {                          // If using regression, estimate
            fval = regress ( tptr , sptr ) ; // out weights now
            if ((user_quit = user_pressed_escape ()) != 0)
               break ;
            if (fval >= avg_func) {          // If this would raise mean
               ++n_bad ;                     // Count this bad point for user
               continue ;                    // Skip it and try again
               }
            }

         if (use_grad)                       // Need gradient, fval redundant
            fval = gradient ( tptr , work1 , work2 , grad ) ;
         else if (! reg)                     // If reg we got fval from regress
            fval = trial_error ( tptr ) ;

         if ((user_quit = user_pressed_escape ()) != 0)
            break ;

         if (fval >= avg_func) {             // If this would raise mean
            ++n_bad ;                        // Count this bad point for user
            continue ;                       // Skip it and try again
            }

         ++n_good ;

         if (fval < bestfval) {              // If this iteration improved
            bestfval = fval ;                // then update the best so far
            copy_weights ( bestnet , this ) ; // Keep the network

            if (bestfval <= fquit)           // If we reached the user's
               break ;                       // limit, we can quit

            iter -= setback ;                // It often pays to keep going
            if (iter < 0)                    // at this temperature if we
               iter = 0 ;                    // are still improving
            }

         adjust ( avgnet , this , reg , new_fac ) ; // Move center slightly
         avg_func = new_fac * fval  +  (1.0 - new_fac) * avg_func ;
         if (use_grad) {
            grad_weight = new_fac * weight_used + (1.0 - new_fac) * grad_weight ;
            for (i=0 ; i<n_grad ; i++)          // Adjust mean gradient
               avg_grad[i] = new_fac * grad[i] + (1.0 - new_fac) * avg_grad[i] ;
            }
         }                                   // Loop: for all iters at a temp

/*
   Iters within temp loop now complete
*/

      sprintf ( msg , " %.3lf%% improved  avg=%.5lf  best=%.5lf",
         100.0 * n_good / (double) (n_good+n_bad), avg_func, bestfval ) ;
      progress_message ( msg ) ;

      if (use_grad) {
         gradlen = 0.0 ;                        // Will cumulate grad length
         for (i=0 ; i<n_grad ; i++)             // Find gradient length
            gradlen += avg_grad[i] * avg_grad[i] ;
         gradlen = sqrt ( gradlen ) ;
         sprintf ( msg , "  grad=%.5lf", gradlen ) ;
         progress_message ( msg ) ;
         }

      if (bestfval <= fquit)  // If we reached the user's
         break ;              // limit, we can quit

      if (user_quit)
         break ;

      temp *= tempmult ;      // Reduce temp for next pass
      }                       // through this temperature loop


/*
   The trials left this weight set and neterr in random condition.
   Make them equal to the best, which will be the original
   if we never improved.
*/

FINISH:
   copy_weights ( this , bestnet ) ; // Return best weights in this net
   neterr = bestfval ;               // Trials destroyed weights, err

   if (reg) {
      MEMTEXT ( "SSG: delete SingularValueDecomp" ) ;
      delete sptr ;
      }

   if (user_quit)
      return 1 ;
   else
      return 0 ;
}
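
The annealing schedule used above is geometric: tempmult is chosen so that after ntemps-1 reductions the temperature lands exactly on stoptemp.  A short sketch that prints such a schedule for assumed parameter values:

#include <cstdio>
#include <cmath>

int main ()
{
   // Illustrative values; the real ones come from the user's AnnealParams
   int ntemps = 4 ;
   double starttemp = 3.0, stoptemp = 0.02 ;

   // Same multiplier as ssg_core: temp reaches stoptemp on the final pass
   double tempmult = exp ( log ( stoptemp / starttemp ) / (ntemps - 1) ) ;

   double temp = starttemp ;
   for (int itemp=0 ; itemp<ntemps ; itemp++) {
      printf ( "Temp %d = %.4lf\n", itemp+1, temp ) ;   // 3.0000, 0.5646, 0.1063, 0.0200
      temp *= tempmult ;
      }
   return 0 ;
}
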
Example #7
float LayerNet::find_grad (
   TrainingSet *tptr ,
   float *hid2delta ,
   float *outdelta ,
   float *grad
   )
{
   int i, j, size, tset, tclass, n, nprev, nnext ;
   float error, *dptr, diff, delta, *hid1grad, *hid2grad, *outgrad ;
   float *outprev,  *prevact, *nextcoefs, *nextdelta, *gradptr ;
   char msg[80];
/*
   Compute size of each training sample
*/

   if (outmod == OUTMOD_CLASSIFY)
      size = nin + 1 ;
   else if (outmod == OUTMOD_AUTO)
      size = nin ;
   else if (outmod == OUTMOD_GENERAL)
      size = nin + nout ;

/*
   Compute length of grad vector and gradient positions in it.
   Also point to layer previous to output and its size.
   Ditto for layer after hid1.
*/

   if (nhid1 == 0) {      // No hidden layer
      n = nout * (nin+1) ;
      outgrad = grad ;
      nprev = nin ;
      }
   else if (nhid2 == 0) { // One hidden layer
      n = nhid1 * (nin+1) + nout * (nhid1+1) ;
      hid1grad = grad ;
      outgrad = grad + nhid1 * (nin+1) ;
      outprev = hid1 ;
      nprev = nhid1 ;
      nnext = nout ;
      nextcoefs = out_coefs ;
      nextdelta = outdelta ;
      }
   else {                 // Two hidden layers
      n = nhid1 * (nin+1) + nhid2 * (nhid1+1) + nout * (nhid2+1) ;
      hid1grad = grad ;
      hid2grad = grad + nhid1 * (nin+1) ;
      outgrad = hid2grad + nhid2 * (nhid1+1) ;
      outprev = hid2 ;
      nprev = nhid2 ;
      nnext = nhid2 ;
      nextcoefs = hid2_coefs ;
      nextdelta = hid2delta ;
      }

   for (i=0 ; i<n ; i++)  // Zero gradient for summing
      grad[i] = 0.0 ;

   error = 0.0 ;  // Will cumulate total error here
   for (tset=0 ; tset<tptr->ntrain ; tset++) { // Do all samples

      sprintf ( msg , "Learning Pattern No. %d ", tset) ;
      normal_message ( msg ) ;

      dptr = tptr->data + size * tset ;     // Point to this sample
      trial ( dptr ) ;                      // Evaluate network for it

      if (outmod == OUTMOD_AUTO) {          // If this is AUTOASSOCIATIVE
	 for (i=0 ; i<nout ; i++) {         // then the expected outputs
	    diff = *dptr++ - out[i] ;       // are just the inputs
	    error += diff * diff ;
	    outdelta[i] = diff * actderiv ( out[i] ) ;
	    }
         }

      else if (outmod == OUTMOD_CLASSIFY) {  // If this is Classification
	 tclass = (int) dptr[nin] - 1 ;     // class is stored after inputs
         for (i=0 ; i<nout ; i++) {         // Recall that train added a
            if (tclass == i)                // fraction so that the above
               diff = NEURON_ON - out[i] ;  // truncation to get tclass is
            else                            // always safe in any radix
               diff = NEURON_OFF - out[i] ;
            error += diff * diff ;
	    outdelta[i] = diff * actderiv ( out[i] ) ;
            }
         }

      else if (outmod == OUTMOD_GENERAL) {  // If this is GENERAL output
	 dptr += nin ;                      // outputs stored after inputs
         for (i=0 ; i<nout ; i++) {
            diff = *dptr++ - out[i] ;
            error += diff * diff ;
            outdelta[i] = diff * actderiv ( out[i] ) ;
            }
         }

/*
   Cumulate output gradient
*/

      if (nhid1 == 0)         // No hidden layer
         prevact = tptr->data + size * tset ;
      else
         prevact = outprev ;  // Point to previous layer
      gradptr = outgrad ;
      for (i=0 ; i<nout ; i++) {
         delta = outdelta[i] ;
	 for (j=0 ; j<nprev ; j++)
            *gradptr++ += delta * prevact[j] ;
         *gradptr++ += delta ;   // Bias activation is always 1
         }

/*
   Cumulate hid2 gradient (if it exists)
*/
   
      if (nhid2) {
         gradptr = hid2grad ;
         for (i=0 ; i<nhid2 ; i++) {
	    delta = 0.0 ;
            for (j=0 ; j<nout ; j++)
               delta += outdelta[j] * out_coefs[j*(nhid2+1)+i] ;
            delta *= actderiv ( hid2[i] ) ;
            hid2delta[i] = delta ;
	    for (j=0 ; j<nhid1 ; j++)
               *gradptr++ += delta * hid1[j] ;
            *gradptr++ += delta ;   // Bias activation is always 1
            }
         }

/*
   Cumulate hid1 gradient (if it exists)
*/
   
      if (nhid1) {
	 prevact = tptr->data + size * tset ;
	 gradptr = hid1grad ;
         for (i=0 ; i<nhid1 ; i++) {
            delta = 0.0 ;
            for (j=0 ; j<nnext ; j++)
               delta += nextdelta[j] * nextcoefs[j*(nhid1+1)+i] ;
            delta *= actderiv ( hid1[i] ) ;
            for (j=0 ; j<nin ; j++)
	       *gradptr++ += delta * prevact[j] ;
            *gradptr++ += delta ;   // Bias activation is always 1
            }
         }

      } // for all tsets
   
   return error / ((float) tptr->ntrain * (float) nout) ;
}
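
For each output neuron the routine forms diff = target - activation, adds diff squared into the total error, and scales diff by the activation-function derivative to get the delta that is propagated backward.  A minimal sketch of that step for one GENERAL-mode output, assuming a logistic unit whose derivative can be written as out*(1-out); the real actderiv is a table-driven routine not shown in this excerpt:

#include <cstdio>

// Assumed logistic derivative expressed through the activation itself: f'(o) = o * (1 - o)
static float actderiv_sketch ( float out )
{
   return out * (1.0f - out) ;
}

int main ()
{
   // One GENERAL-mode output neuron: target t, obtained activation out
   float t = 0.9f, out = 0.6f ;

   float diff = t - out ;                            // Error for this neuron
   float sq_err = diff * diff ;                      // Contribution to the summed error
   float outdelta = diff * actderiv_sketch ( out ) ; // Delta pushed back through the net

   printf ( "diff=%g  sq_err=%g  delta=%g\n", diff, sq_err, outdelta ) ;
   // With one sample and one output, find_grad would return sq_err / (1 * 1)
   return 0 ;
}
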
Example #8
float LayerNet::conjgrad (
   TrainingSet *tptr , // Training set to use
   int maxits ,        // Maximum iterations allowed
   float reltol ,     // Relative error change tolerance
   float errtol       // Quit if error drops this low
   )
{
   int i, j, n, iter, pnum, key, retry, max_retry ;
   float gam, *g, *h, *outdelta, *hid2delta, *grad, *base ;
   float corr, error, *cptr, *gptr, *pptr, maxgrad ;
   float prev_err ;
   char msg[80];
   max_retry = 5 ;

/*
   Allocate work memory
*/

   MEMTEXT ( "CONJGRAD work" ) ;
   if (nhid2) {
      hid2delta = (float *) MALLOC ( nhid2 * sizeof(float) ) ;
      if (hid2delta == NULL)
         return -2.0 ;
      }
   else
      hid2delta = NULL ;

   outdelta = (float *) MALLOC ( nout * sizeof(float) ) ;

   if (nhid1 == 0)               // No hidden layer
      n = nout * (nin+1) ;
   else if (nhid2 == 0)          // One hidden layer
      n = nhid1 * (nin+1) + nout * (nhid1+1) ;
   else                          // Two hidden layers
      n = nhid1 * (nin+1) + nhid2 * (nhid1+1) + nout * (nhid2+1) ;

   grad = (float *) MALLOC ( n * sizeof(float) ) ;
   base = (float *) MALLOC ( n * sizeof(float) ) ;
   g = (float *) MALLOC ( n * sizeof(float) ) ;
   h = (float *) MALLOC ( n * sizeof(float) ) ;

   if ((outdelta == NULL) || (grad == NULL) ||
       (base == NULL) || (g == NULL) || (h == NULL)) {
      if (hid2delta != NULL)
         FREE ( hid2delta ) ;
      if (outdelta != NULL)
         FREE ( outdelta ) ;
      if (grad != NULL)
         FREE ( grad ) ;
      if (base != NULL)
	 FREE ( base ) ;
      if (g != NULL)
         FREE ( g ) ;
      if (h != NULL)
         FREE ( h ) ;
      return -2.0 ;   // Flags error
      }

   prev_err = 1.e30 ;
   error = find_grad ( tptr , hid2delta , outdelta , grad ) ;

   memcpy ( g , grad , n * sizeof(float) ) ;
   memcpy ( h , grad , n * sizeof(float) ) ;

/*
   Main iteration loop is here
*/

   for (iter=0 ; iter<maxits ; iter++) {  // Each iter is an epoch

/*
   Check current error against user's max.  Abort if user pressed ESCape
*/
      sprintf ( msg , "Gradient Finding...Iter No. %d : Error = %lf %%", iter, 100.0 * error ) ;
      normal_message ( msg ) ;
      if (error <= errtol)   // If our error is within user's limit
	 break ;             // then we are done!

      if (error <= reltol)   // Generally not necessary: reltol<errtol in
         break ;             // practice, but this helps careless users

      if (kbhit()) {         // Was a key pressed?
         key = getch () ;    // Read it if so
         while (kbhit())     // Flush key buffer in case function key
            getch () ;       // or key was held down
         if (key == 27) {    // ESCape
            error = -error ; // Flags user that ESCape was pressed
            break ;
            }
         }

      prev_err = error ;
      error = direcmin ( tptr , error , 10 , 1.e-10 ,
                         0.5 , base , grad ) ;
      if (error < 0.0)  // Indicates user pressed ESCape
         goto CGFINISH ;

      if ((2.0 * (prev_err - error)) <=       // If this direc gave poor result
          (reltol * (prev_err + error + 1.e-10))) { // will use random direc
         prev_err = error ;                   // But first exhaust grad
         error = find_grad ( tptr , hid2delta , outdelta , grad ) ;
         error = direcmin ( tptr , error , 15 , 1.e-10 ,
                            1.e-3 , base , grad ) ;
         for (retry=0 ; retry<max_retry ; retry++) {
            for (i=0 ; i<n ; i++)
	       grad[i] = (float) (rand() - RANDMAX/2) / (RANDMAX * 10.0) ;
            error = direcmin ( tptr , error , 10 , 1.e-10 ,
                               1.e-2 , base , grad ) ;
            if (error < 0.0)  // Indicates user pressed ESCape
               goto CGFINISH ;
            if (retry < max_retry/2)
               continue ;
            if ((2.0 * (prev_err - error)) >
                (reltol * (prev_err + error + 1.e-10)))
               break ;   // Get out of retry loop if we improved enough
            } // For retry
         if (retry == max_retry)   // If we exhausted all tries
            break ;                // probably hopeless
	 memcpy ( g , grad , n * sizeof(float) ) ;
	 memcpy ( h , grad , n * sizeof(float) ) ;
         } // If this dir gave poor result

      prev_err = error ;

/*
   Setup for next iteration
*/

      error = find_grad ( tptr , hid2delta , outdelta , grad ) ;
      gam = gamma ( g , grad ) ;
      if (gam < 0.0)
         gam = 0.0 ;
      if (gam > 1.0)
         gam = 1.0 ;

      find_new_dir ( gam , g , h , grad ) ;
      }  // This is the end of the main iteration loop

/*
   Free work memory
*/

CGFINISH:
   MEMTEXT ( "CONJGRAD work" ) ;
   if (hid2delta != NULL)
      FREE ( hid2delta ) ;
   FREE ( outdelta ) ;
   FREE ( grad ) ;
   FREE ( base ) ;
   FREE ( g ) ;
   FREE ( h ) ;

   return error ;
}
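
The gamma and find_new_dir routines called above are not listed in this excerpt.  The sketch below is a plausible Polak-Ribiere version consistent with how conjgrad uses them (g holds the previous gradient, h the previous direction, and the new search direction is left in grad); treat the exact formulas as an assumption rather than the NEURAL source:

#include <cstdio>

static float gamma_sketch ( const float *g , const float *grad , int n )
{
   float num = 0.0f, denom = 0.0f ;
   for (int i=0 ; i<n ; i++) {
      num   += (grad[i] - g[i]) * grad[i] ;   // Polak-Ribiere numerator
      denom += g[i] * g[i] ;                  // Squared length of previous gradient
      }
   return (denom > 0.0f)  ?  num / denom  :  0.0f ;
}

static void find_new_dir_sketch ( float gam , float *g , float *h ,
                                  float *grad , int n )
{
   for (int i=0 ; i<n ; i++) {
      g[i] = grad[i] ;              // Remember this gradient for the next gamma
      h[i] = g[i] + gam * h[i] ;    // New conjugate search direction
      grad[i] = h[i] ;              // conjgrad minimizes along grad, so leave it there
      }
}

int main ()
{
   float g[2]    = { 1.0f, 0.0f } ;   // Previous gradient
   float grad[2] = { 0.5f, 0.5f } ;   // Current gradient
   float h[2]    = { 1.0f, 0.0f } ;   // Previous direction

   float gam = gamma_sketch ( g , grad , 2 ) ;
   if (gam < 0.0f) gam = 0.0f ;       // Same clamping as in conjgrad
   if (gam > 1.0f) gam = 1.0f ;

   find_new_dir_sketch ( gam , g , h , grad , 2 ) ;
   printf ( "gam=%g  dir=(%g, %g)\n", gam, grad[0], grad[1] ) ;
   return 0 ;
}
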
Example #9
int main (
   int argc ,    // Number of command line arguments (includes prog name)
   char *argv[]  // Arguments (prog name is argv[0])
   )

{

/*
   Declarations of local variables
*/

/*
   User's command control line related variables are here.
   Control_file_number and control_files permit nesting of 'CONTROL' commands.
   If control_file_number equals -1, control commands are read from stdin.
   Otherwise they are read from that file in FILE *control_files.
   Up to MAX_CONTROL_FILES can be stacked.
*/

   int control_file_number = -1 ;           // Stack pointer for control files
   FILE *control_files[MAX_CONTROL_FILES] ; // This is the stack

   char *control_line ;    // User's commands here
   char *command, *rest ;  // Pointers to its command and parameter parts
   int n_command, n_rest ; // Lengths of those parts

/*
   These are network parameters which may be set by the user via commands.
   They are initialized to defaults which indicate that the user has not
   yet set them.  As they are set, their current values are placed here.
   When learning is done for a network, their values are copied from here
   into the network object.  When a network is read, the object's values
   are copied from it to here.  Otherwise, these variables are not used;
   the values in the network object itself are used.  The only purpose of
   these variables is to keep track of current values.
*/

   int net_model = -1 ;     // Network model (see NETMOD_? in CONST.H)
   int out_model = -1 ;     // Output model (see OUTMOD_? in CONST.H)
   int n_inputs = -1 ;      // Number of input neurons
   int n_outputs = -1 ;     // Number of output neurons
   int n_hidden1 = -1 ;     // Number of hidden layer one neurons
   int n_hidden2 = -1 ;     // Ditto layer 2 (0 if just one hidden layer)


   TrainingSet *tset = NULL ;            // Training set here
   Network *network = NULL ;             // Network here
   struct LearnParams learn_params ;     // General learning parameters
   struct AnnealParams anneal_params ;   // Simulated annealing parameters
   struct GenInitParams geninit_params ; // Genetic initialization parameters
   struct KohParams koh_params ;         // Kohonen parameters

   int classif_output = -1 ;  // Current class (0=reject) for classif training
   char out_file[80] = "" ;   // File for EXECUTE output
   double threshold = 0.0 ;   // CLASSIFY confusion reject cutoff

/*
   Miscellaneous variables
*/

   int i, n, m ;
   double p ;
   char *msg ;
   FILE *fp ;

/*
--------------------------------------------------------------------------------

   Program starts here.

   Verify that a careless user didn't fail to set the integer size
   correctly when compiling.

--------------------------------------------------------------------------------
*/

#if VERSION_16_BIT
   if (sizeof(int) > 2) {
      printf ( "\nRecompile with VERSION_16_BIT set to 0 in CONST.H" ) ;
      exit ( 1 ) ;
      }
#else
   if (sizeof(int) < 4) {
      printf ( "\nRecompile with VERSION_16_BIT set to 1 in CONST.H" ) ;
      exit ( 1 ) ;
      }
#endif

printf ( "\nNEURAL - Program to train and test neural networks" ) ;
printf("\nCopyright (c) 1993 by Academic Press, Inc.");
printf("\nAll rights reserved.  Permission is hereby granted, until further notice,");
printf("\nto make copies of this diskette, which are not for resale, provided these");
printf("\ncopies are made from this master diskette only, and provided that the");
printf("\nfollowing copyright notice appears on the diskette label:");
printf("\n(c) 1993 by Academic Press, Inc.");
printf("\nExcept as previously stated, no part of the computer program embodied in");
printf("\nthis diskette may be reproduced or transmitted in any form or by any means,");
printf("\nelectronic or mechanical, including input into storage in any information");
printf("\nsystem for resale, without permission in writing from the publisher.");
printf("\nProduced in the United States of America.");
printf("\nISBN 0-12-479041-0");

/*
   Process command line parameters
*/

   mem_name[0] = 0 ;  // Default is no memory allocation file

   for (i=1 ; i<argc ; i++) {  // Process all command line args
      str_to_upr ( argv[i] ) ; // Easier if all upper case

      if (! strcmp ( argv[i] , "/DEBUG" )) {
         sscanf ( argv[++i] , "%s" , mem_name ) ;
         if ((strlen ( mem_name ) > 1)  ||  ! isalpha ( mem_name[0] )) {
            printf ( "\nIllegal DEBUG drive (%s); must be 1 letter.", mem_name ) ;
            exit ( 1 ) ;
            }
         continue ;
         }

      printf ( "\nIllegal command line parameter (%s)", argv[i] ) ;
      exit ( 1 ) ;
      }

/*
   Initialize memory allocation debugging
*/

   if (strlen ( mem_name )) {
      strcat ( mem_name , ":mem.log" ) ;
      fp = fopen ( mem_name , "wt" ) ;
      if (fp == NULL) {
         printf ( "\nCannot open debugging file %s", mem_name ) ;
         exit ( 1 ) ;
         }
      fclose ( fp ) ;
      mem_log = 1 ;
      }
   else 
      mem_log = 0 ;

   mem_used = 0 ;

/*
   Initialize defaults
*/

   learn_params.init = -1 ;
   learn_params.quit_err = 0.0 ;
   learn_params.retries = 32767 ;

   anneal_params.temps0 = 3 ;
   anneal_params.temps = 4 ;
   anneal_params.iters0 = 50 ;
   anneal_params.iters = 20 ;
   anneal_params.setback0 = 50 ;
   anneal_params.setback = 20 ;
   anneal_params.start0 = 3.0 ;
   anneal_params.start = 4.0 ;
   anneal_params.stop0 = 1.0 ;
   anneal_params.stop = 0.02 ;

   geninit_params.pool = 50 ;
   geninit_params.gens = 3 ;
   geninit_params.climb = 0 ;
   geninit_params.overinit = 1.5 ;
   geninit_params.pcross = 0.8 ;
   geninit_params.pmutate = 0.0001 ;

   koh_params.normalization = 0 ;  // 0=multiplicative, 1=Z 
   koh_params.learn_method = 1 ;   // 0=additive, 1=subtractive
   koh_params.rate = 0.4 ;         // learning rate
   koh_params.reduction = 0.99 ;   // learning rate reduction

   learn_params.ap = &anneal_params ;
   learn_params.gp = &geninit_params ;
   learn_params.kp = &koh_params ;

   act_func_init () ; // Initialize interpolation table for activation function

   MEMTEXT ( "NEURAL: control_line, msg" ) ;
   if (((control_line = (char *) MALLOC ( CONTROL_LINE_LENGTH+1 )) == NULL)
    || ((msg = (char *) MALLOC ( CONTROL_LINE_LENGTH+1 )) == NULL)) {
      printf ( "\nInsufficient memory" ) ;
      exit ( 1 ) ;
      }

/*
   Main loop processes all commands
*/
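
/*
   Commands arrive one per line as "COMMAND : argument"; the parsing below
   suggests that every line contains a colon separating the command name
   from its argument, which may be empty.  An illustrative control script
   (the file names are hypothetical, and an initialization method must also
   be chosen via the learning-parameter commands handled by
   check_learn_params before LEARN will run) might look like this:

      NETWORK MODEL : LAYER
      OUTPUT MODEL : CLASSIFY
      N INPUTS : 10
      N OUTPUTS : 2
      N HIDDEN1 : 5
      N HIDDEN2 : 0
      CLASSIFY OUTPUT : 1
      TRAIN : class1.dat
      CLASSIFY OUTPUT : 2
      TRAIN : class2.dat
      LEARN :
      SAVE WEIGHTS : example.wts
      QUIT :
*/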

   for (;;) {

      get_control_line ( control_line , &control_file_number, control_files ) ;

      split_control_line ( control_line , &command , &n_command ,
                           &rest , &n_rest ) ;

      if (! n_command) {
         if (n_rest) {
            sprintf ( msg , "No colon after command: %s", rest ) ;
            error_message ( msg ) ;
            }
         continue ;
         }

      sprintf ( msg , "%s : %s", command, rest ) ;
      normal_message ( msg ) ;

/*
   Act on the command
*/

      if (! strcmp ( command , "QUIT" ))
         break ;
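
      // CONTROL switches command input to the named file; stack_control_file
      // pushes it onto a stack so that control files may be nested.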

      if (! strcmp ( command , "CONTROL" )) {
         stack_control_file ( rest , &control_file_number , control_files ) ;
         continue ;
         }
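
      // Changing the network model invalidates existing weights, so the user
      // must confirm before they are discarded; the initialization method is
      // also reset.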

      if (! strcmp ( command , "NETWORK MODEL" )) {
         if (! strcmp ( rest , "LAYER" ))
            n = NETMOD_LAYER ;
         else if (! strcmp ( rest , "KOHONEN" ))
            n = NETMOD_KOH ;
         else {
            sprintf ( msg , "Illegal NETWORK MODEL: %s", rest ) ;
            error_message ( msg ) ;
            continue ;
            }
         if (net_model == n)
            continue ;
         if (ok_to_clear_weights( &network )) {
            net_model = n ;
            learn_params.init = -1 ;
            }
         else
            warning_message ( "Command aborted" ) ;
         continue ;
         }

      if (! strcmp ( command , "OUTPUT MODEL" )) {
         if (! strcmp ( rest , "CLASSIFY" ))
            n = OUTMOD_CLASSIFY ;
         else if (! strcmp ( rest , "AUTO" ))
            n = OUTMOD_AUTO ;
         else if (! strcmp ( rest , "GENERAL" ))
            n = OUTMOD_GENERAL ;
         else {
            sprintf ( msg , "Illegal OUTPUT MODEL: %s", rest ) ;
            error_message ( msg ) ;
            continue ;
            }
         if (out_model == n)
            continue ;
         if ((ok_to_clear_tset( &tset )) && (ok_to_clear_weights( &network)))
            out_model = n ;
         else
            warning_message ( "Command aborted" ) ;
         continue ;
         }

      if (! strcmp ( command , "N INPUTS" )) {
         m = sscanf ( rest , "%d" , &n ) ;
         if ((m <= 0)  ||  (n <= 0)  ||  (n > MAX_INPUTS)) {
            sprintf ( msg , "Illegal N INPUTS: %s", rest ) ;
            error_message ( msg ) ;
            continue ;
            }
         if (n_inputs == n)
            continue ;
         if ((ok_to_clear_tset( &tset)) && (ok_to_clear_weights(&network)))
            n_inputs = n ;
         else
            warning_message ( "Command aborted" ) ;
         continue ;
         }

      if (! strcmp ( command , "N OUTPUTS" )) {
         m = sscanf ( rest , "%d" , &n ) ;
         if ((m <= 0)  ||  (n <= 0)  ||  (n > MAX_OUTPUTS)) {
            sprintf ( msg , "Illegal N OUTPUTS: %s", rest ) ;
            error_message ( msg ) ;
            continue ;
            }
         if (n_outputs == n)
            continue ;
         if ((ok_to_clear_tset( &tset)) && (ok_to_clear_weights(&network)))
            n_outputs = n ;
         else
            warning_message ( "Command aborted" ) ;
         continue ;
         }

      if (! strcmp ( command , "N HIDDEN1" )) {
         m = sscanf ( rest , "%d" , &n ) ;
         if ((m <= 0)  ||  (n < 0)  ||  (n > MAX_HIDDEN)) {
            sprintf ( msg , "Illegal N HIDDEN1: %s", rest ) ;
            error_message ( msg ) ;
            continue ;
            }
         if (n_hidden1 == n)
            continue ;
         if (ok_to_clear_weights( &network ))
            n_hidden1 = n ;
         else
            warning_message ( "Command aborted" ) ;
         continue ;
         }

      if (! strcmp ( command , "N HIDDEN2" )) {
         m = sscanf ( rest , "%d" , &n ) ;
         if ((m <= 0)  ||  (n < 0)  ||  (n > MAX_HIDDEN)) {
            sprintf ( msg , "Illegal N HIDDEN2: %s", rest ) ;
            error_message ( msg ) ;
            continue ;
            }
         if (n  &&  ! n_hidden1) {
            error_message ( "N HIDDEN2 must be 0 if N HIDDEN1 IS 0." ) ;
            continue ;
            }
         if (n_hidden2 == n)
            continue ;
         if (ok_to_clear_weights( &network ))
            n_hidden2 = n ;
         else
            warning_message ( "Command aborted" ) ;
         continue ;
         }
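
      // TRAIN reads training cases from the named file.  The configuration is
      // checked first, and the TrainingSet is created on first use so that
      // subsequent TRAIN commands add to the same set.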

      if (! strcmp ( command , "TRAIN" )) {
         if ((out_model == OUTMOD_AUTO)  &&  (n_outputs != n_inputs)) {
            warning_message ( "Setting N OUTPUTS = N INPUTS" ) ;
            n_outputs = n_inputs ;
            }
         if (out_model <= 0)
            error_message ( "TRAIN used before OUTPUT MODEL set." ) ;
         else if (n_inputs <= 0)
            error_message ( "TRAIN used before N INPUTS set." ) ;
         else if (n_outputs <= 0)
            error_message ( "TRAIN used before N OUTPUTS set." ) ;
         else if ((net_model != NETMOD_KOH) && (out_model == OUTMOD_CLASSIFY)
                  &&  (classif_output < 0))
            error_message( "CLASSIFY output mode but CLASSIFY OUTPUT not set.");
         else if ((net_model == NETMOD_KOH)  &&  (out_model != OUTMOD_CLASSIFY))
            error_message( "KOHONEN network requires CLASSIFY output mode.");
         else {
            if (tset == NULL) {
               MEMTEXT ( "NEURAL: new tset" ) ;
               tset = new TrainingSet ( out_model , n_inputs , n_outputs ) ;
               }
            tset->train ( rest , classif_output ) ;
            }
         continue ;
         }
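
      // Parameter-setting commands (annealing, genetic, Kohonen, and general
      // learning parameters) are delegated to these routines; each returns
      // nonzero if it recognized and handled the command.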

      if (check_anneal ( command , rest , &anneal_params ))
         continue ;

      if (check_genetic ( command , rest , &geninit_params ))
         continue ;

      if (check_kohonen ( command , rest , &koh_params , &network ))
         continue ;

      if (check_learn_params ( command , rest , &learn_params , net_model ))
         continue ;
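
      // LEARN trains the network on the current training set.  The network
      // object is created here if it does not yet exist, and is deleted again
      // if training fails so badly that the error is essentially at its
      // maximum possible value.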

      if (! strcmp ( command , "LEARN" )) {
         if ((tset == NULL)  ||  (tset->ntrain == 0)) {
            error_message ( "Cannot LEARN; No training set exists." ) ;
            continue ;
            }
         if ((net_model == NETMOD_KOH)  &&  (out_model != OUTMOD_CLASSIFY)) {
            error_message( "KOHONEN network requires CLASSIFY output mode.");
            continue ;
            }
         if (learn_params.init < 0) {
            error_message( "Initialization method not set.");
            continue ;
            }
         if (network == NULL) {
            if (net_model == NETMOD_LAYER) {
               if (n_hidden1 < 0) {
                  error_message ( "LEARN used before N HIDDEN1 set." ) ;
                  continue ;
                  }
               else if (n_hidden2 < 0) {
                  error_message ( "LEARN used before N HIDDEN2 set." ) ;
                  continue ;
                  }
               else {
                  MEMTEXT ( "NEURAL: new LayerNet" ) ;
                  network = new LayerNet ( out_model , n_inputs , n_hidden1 ,
                                           n_hidden2 , n_outputs , 1 , 1 ) ;
                  }
               }
            else if (net_model == NETMOD_KOH) {
               MEMTEXT ( "NEURAL: new KohNet" ) ;
               network = new KohNet ( n_inputs , n_outputs ,
                                      &koh_params , 1 , 1 ) ;
               }
            }
         if ((network == NULL)  ||  (! network->ok)) {  // Malloc failure?
            memory_message ( "to create network." ) ;
            if (network != NULL) {
               delete network ;
               network = NULL ;
               }
            continue ;
            }
         network->learn ( tset , &learn_params ) ;
         if (network->neterr > 0.999999) {  // Indicates massive failure
            MEMTEXT ( "NEURAL: learn failure delete network" ) ;
            delete network ;
            network = NULL ;
            }
         else {
            sprintf ( msg , "Final error = %.4lf%% of max possible",
                      100.0 * network->neterr ) ;
            normal_message ( msg ) ;
            }
         continue ;
         }

      if (! strcmp ( command , "SAVE WEIGHTS" )) {
         if (network == NULL)
            error_message ( "There are no learned weights to save." ) ;
         else
            wt_save ( network , net_model , 0 , rest ) ;
         continue ;
         }
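
      // RESTORE WEIGHTS replaces any existing network with one read from disk,
      // verifies that it does not conflict with an existing training set, then
      // forces the architecture settings to agree with the restored network.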

      if (! strcmp ( command , "RESTORE WEIGHTS" )) {
         if (network != NULL) {
            MEMTEXT ( "NEURAL: delete network for restore" ) ;
            delete network ;
            network = NULL ;
            }
         network = wt_restore ( rest , &net_model ) ;
         if (network == NULL)
            continue ;
         if (tset != NULL) {
            if ((tset->nin != network->nin)
             || (tset->nout != network->nout)
             || (tset->outmod != network->outmod)) {
               error_message ( "Network conflicts with existing training set.");
               continue ;
               }
            }
         out_model = network->outmod ;
         n_inputs = network->nin ;
         n_outputs = network->nout ;
         if (net_model == NETMOD_LAYER) {
            n_hidden1 = ((LayerNet*) network)->nhid1 ;
            n_hidden2 = ((LayerNet*) network)->nhid2 ;
            }
         if (net_model == NETMOD_KOH)
            koh_params.normalization = ((KohNet *) network)->normalization ;
         learn_params.init = -1 ;
         continue ;
         }

      if (! strcmp ( command , "CLEAR TRAINING" )) {
         if (tset != NULL) {
            MEMTEXT ( "NEURAL: delete tset" ) ;
            delete tset ;
            tset = NULL ;
            }
         continue ;
         }

      if (! strcmp ( command , "CLEAR WEIGHTS" )) {
         if (network != NULL) {
            MEMTEXT ( "NEURAL: delete network" ) ;
            delete network ;
            network = NULL ;
            }
         continue ;
         }
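
      // CLASSIFY OUTPUT declares which output neuron represents the class of
      // subsequently supplied training data; it is meaningful only in CLASSIFY
      // output mode and is not used by the KOHONEN model.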

      if (! strcmp ( command , "CLASSIFY OUTPUT" )) {
         if (net_model == NETMOD_KOH) {
            error_message ( "Cannot specify output for KOHONEN model." ) ;
            continue ;
            }
         if (n_outputs < 0) {
            error_message ( "CLASSIFY OUTPUT used before N OUTPUTS set." ) ;
            continue ;
            }
         if (out_model != OUTMOD_CLASSIFY) {
            error_message
                  ( "CLASSIFY OUTPUT only valid when OUTPUT MODEL:CLASSIFY" ) ;
            continue ;
            }
         m = sscanf ( rest , "%d" , &n ) ;
         if ((m <= 0)  ||  (n < 0)) {
            sprintf ( msg , "Illegal CLASSIFY OUTPUT: %s", rest ) ;
            error_message ( msg ) ;
            }
         else if (n > n_outputs) {
            sprintf ( msg , "CLASSIFY OUTPUT (%d) exceeds N OUTPUTS (%d)",
                      n, n_outputs ) ;
            error_message ( msg ) ;
            }
         else
            classif_output = n ;
         continue ;
         }

      if (! strcmp ( command , "OUTPUT FILE" )) {
         strcpy ( out_file , rest ) ;
         continue ;
         }

      if (! strcmp ( command , "EXECUTE" )) {
         if (network == NULL)
            error_message ( "There is no trained network" ) ;
         else
            network->execute_from_file ( rest , out_file ) ;
         continue ;
         }

      if (! strcmp ( command , "CLASSIFY" )) {
         if (network == NULL)
            error_message ( "There is no trained network" ) ;
         else if (out_model != OUTMOD_CLASSIFY)
            error_message ( "CLASSIFY valid only in CLASSIFY output mode" ) ;
         else
            network->classify_from_file ( rest , threshold ) ;
         continue ;
         }

      if (! strcmp ( command , "RESET CONFUSION" )) {
         if (network == NULL)
            error_message ( "There is no trained network" ) ;
         else
            network->reset_confusion () ;
         continue ;
         }
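
      // The confusion threshold is entered as a percentage (0-100) and stored
      // as a fraction for use when classifying.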

      if (! strcmp ( command , "CONFUSION THRESHOLD" )) {
         p = atof ( rest ) ;
         if ((p < 0.0)  ||  (p > 100.0)) {
            sprintf ( msg , "Illegal CONFUSION THRESHOLD: %s", rest ) ;
            error_message ( msg ) ;
            }
         else
            threshold = p / 100.0 ;
         continue ;
         }

      if (! strcmp ( command , "SHOW CONFUSION" )) {
         if (network == NULL)
            error_message ( "There is no trained network" ) ;
         else if (out_model != OUTMOD_CLASSIFY)
            error_message ( "CONFUSION valid only in CLASSIFY output mode" ) ;
         else
            network->show_confusion () ;
         continue ;
         }

      if (! strcmp ( command , "SAVE CONFUSION" )) {
         if (network == NULL)
            error_message ( "There is no trained network" ) ;
         else if (out_model != OUTMOD_CLASSIFY)
            error_message ( "CONFUSION valid only in CLASSIFY output mode" ) ;
         else
            network->save_confusion ( rest ) ;
         continue ;
         }

      sprintf ( msg , "Unknown command: %s", command ) ;
      error_message ( msg ) ;

      } // Endless command loop

   MEMTEXT ( "NEURAL: control_line, msg" ) ;
   FREE ( control_line ) ;
   FREE ( msg ) ;
   MEMCLOSE () ;
   return 0 ;
}