Example #1
0
/*
 * Demo: draw five uniform and five Gaussian samples from each of the two
 * RNG interfaces (object-style RNG* API, then the global-state ran1 API).
 */
int main()
{
    RNG *rng;
    int n;

    /* Object-style API: create a generator, draw from it, destroy it. */
    rng = rng_create();

    printf("Five uniform random numbers\n");
    for (n = 0; n < 5; n++) {
        printf("%f ", rng_uniform01(rng));
    }
    printf("\n");

    printf("Five Gaussian random numbers\n");
    for (n = 0; n < 5; n++) {
        printf("%f ", rng_gaussian(rng, 0.0, 1.0));
    }
    printf("\n\n");

    rng_destroy(rng);

    /* Global-state API: seed once from the clock, then draw. */
    RAN1_SeedRandom((long)time(NULL));

    printf("Five uniform random numbers (ran1 api)\n");
    for (n = 0; n < 5; n++) {
        printf("%f ", RAN1_SimpleRandom());
    }
    printf("\n");

    printf("Five Gaussian random numbers (ran1 api)\n");
    for (n = 0; n < 5; n++) {
        printf("%f ", RAN1_GaussianRandom(0.0, 1.0));
    }
    printf("\n");

    return 0;
}
/*
 * Board bring-up for the 25 MHz HSE target: clocks, GPIOs, UARTs, I2C
 * buses, hardware RNG, SDIO, USB, and Ethernet.  Returns 0 on completion.
 */
int machine_init(void)
{
    int idx;

    /* Clock tree: 25 MHz external crystal, 3.3 V configuration. */
    rcc_clock_setup_hse_3v3(&rcc_hse_25mhz_3v3);

    /* Discrete GPIOs: status LED and user button. */
    gpio_create(NULL, &gpio_led0);
    gpio_create(NULL, &gpio_button);

    /* Serial ports. */
    for (idx = 0; idx < NUM_UARTS; idx++) {
        uart_create(&uart_configs[idx]);
    }

    /* I2C buses. */
    for (idx = 0; idx < NUM_I2CS; idx++) {
        i2c_create(&i2c_configs[idx]);
    }

    /* Hardware random number generator. */
    rng_create(1, RCC_RNG);

    /* SD/MMC controller, clock-gated via APB2. */
    sdio_conf.rcc_reg = (uint32_t *)&RCC_APB2ENR;
    sdio_conf.rcc_en  = RCC_APB2ENR_SDMMC1EN;
    sdio_init(&sdio_conf);

    usb_init(&usb_guest);
    ethernet_init(&eth_config);

    return 0;
}
/*
 * Board bring-up for the 8 MHz HSE target: clocks, LEDs, UARTs, I2C buses,
 * optional STMPE811 touch controller, and the hardware RNG.
 * Returns 0 on completion.
 */
int machine_init(void)
{
    int n;

    /* Clock tree: 8 MHz HSE table, entry selected by STM32_CLOCK. */
    rcc_clock_setup_hse_3v3(&rcc_hse_8mhz_3v3[STM32_CLOCK]);

    /* LEDs */
    gpio_create(NULL, &Led0);
    gpio_create(NULL, &Led1);

    /* Serial ports */
    for (n = 0; n < NUM_UARTS; n++) {
        uart_create(&uart_configs[n]);
    }

    /* I2C buses */
    for (n = 0; n < NUM_I2CS; n++) {
        i2c_create(&i2c_configs[n]);
    }

#ifdef CONFIG_DEVSTMPE811
    /* Optional STMPE811 touchscreen controller. */
    stmpe811_init(&ts_conf);
#endif

    /* Hardware random number generator. */
    rng_create(1, RCC_RNG);

    return 0;
}
/*
 * Demo: train a 1-500-1 feed-forward network to approximate sin(x) on
 * [0, 2*pi] using per-trial (non-epoch) delta-rule updates, then print the
 * trained network's output over a sweep of the input range.
 *
 * Training stops at 5,000,000 presentations, or earlier when every hidden
 * gradient is near zero AND the output has been nearly unchanged for three
 * consecutive trials.
 */
int main()
{ int c;
  double Y;

  network_ptr my_network;  /* This is a pointer to the neural net structure that will
                              store our trained network */

  RNG *rng_one;            /* This holds the current state of a random number generator.
                              see rnd.c if you interested */

  /* Note: outputs are not trained to extreme values (0.0 or 1.0) and inputs
     avoid extremes too — sigmoid units only reach 0/1 asymptotically, so
     extreme targets drive weights toward infinity. */

  double X[1];             /* single input: angle drawn from [0, 2*pi) */
  double D[1];             /* single desired output: sin(X[0]) */
  double cc;

  const double TWO_PI     = 2.0 * 3.14159;
  const double GRAD_EPS   = 0.000001;  /* hidden gradient this small counts as "zero" */
  const double OUT_EPS    = 0.1;       /* successive outputs this close count as "flat" */
  const double LEARN_RATE = 0.01;

  /* Set up random number generator */
  rng_one = rng_create();

  /* Set up neural network: one input node, 500 hidden nodes, one output node */
  malloc_network(1, 500, 1, &my_network);

  /* Randomize initial weights */
  init_network_weights(my_network, 0.1, rng_one);

  /* Train on randomly drawn (x, sin(x)) pairs, updating weights with each
     trial.  Hard cap of 5,000,000 presentations; the early-stop condition
     below approximates "gradient has vanished and output has settled". */
  int i = 0;
  int zero_grad_count = 0;  /* hidden gradients near zero this trial */
  int flat_out_count = 0;   /* consecutive trials with nearly unchanged output */
  double o1 = 0.0, o2 = 0.0;

  for (c = 0; c < 5000000; c++)
    {
      o1 = o2;
      X[0] = rng_uniform(rng_one, 0.0, TWO_PI);
      D[0] = sin(X[0]);
      compute_network_outputs(&X[0], my_network);    /* feed inputs forward */
      compute_network_gradients(&D[0], my_network);  /* compute gradients   */
      apply_delta_rule(my_network, LEARN_RATE);      /* adjust weights      */
      o2 = my_network->output_layer[0].output;

      /* Count hidden units whose gradient is effectively zero. */
      for (i = 0; i < my_network->hidden_neuron_count; i++) {
          if (fabs(my_network->hidden_layer[i].gradient) < GRAD_EPS) {
              zero_grad_count++;
          }
      }

      /* Track how long the output has stayed within OUT_EPS of itself. */
      if (fabs(o2 - o1) < OUT_EPS) {
          flat_out_count++;
      } else {
          flat_out_count = 0;
      }

      /* Stop when ALL hidden gradients are ~0 and the output has been flat
         for three consecutive trials.  (Compare against the actual hidden
         layer size rather than a hard-coded 500.) */
      if (zero_grad_count == my_network->hidden_neuron_count && flat_out_count == 3) {
          break;
      }
      zero_grad_count = 0;
    }

  /* Now, let's see how we did by sweeping the trained network across the
     full input range. */

  printf("\nNetwork Outputs for Each Training Pattern\n");
  printf("-----------------------------------------\n");

  for (cc = 0.0; cc <= TWO_PI; cc += 0.01)
    {
      X[0] = cc;
      compute_network_outputs(&X[0], my_network); /* Feed input pattern forward */
      Y = my_network->output_layer[0].output;     /* Get output of output layer node 0 */
      printf("%lf %lf\n", X[0], Y);               /* Print outputs generated by trained network */
    }

  printf("\n");
  printf("Perceptron Architecture \n");
  printf("-----------------------------------------\n");

  print_network_outputs(my_network);
  free_network(&my_network);

  printf("Break at:%d\n", c);
  system("pause");  /* NOTE(review): Windows-only; fails harmlessly elsewhere */

  return 0;
}