void selection::sigmaScaling(vector<individual*> &adults, vector<individual*> &parents, int count){ int i, size = adults.size(); float std = 0, avg, sum = 0; vector<float> sigmaValues; vector<individual*> p; for(i = 0; i < size; i++){ //Calc total fitness sum += adults[i]->getFitness(); } avg = sum / static_cast<float>(size); for(int i = 0; i < size; i++){ //Standard deviation std += pow(adults[i]->getFitness()-avg, 2); } std = sqrt(sum/size); sum = 0; for(i = 0; i < size; i++){ //Calculate sigma values (unnormalized) sigmaValues.push_back(exp((1+( adults[i]->getFitness()-avg )/(2*std)))); sum += sigmaValues[i]; } for(i = 0; i < size; i++){ //Normalized probability sigmaValues[i] = sigmaValues[i]/sum; } p = rouletteWheel(sigmaValues, adults, count); parents.insert(parents.end(), p.begin(), p.end()); }
//Fitness Proportionate void selection::fitnessProportionate(vector<individual*> &adults, vector<individual*> &parents, int count){ int sum = 0, i; vector<float> fitnessValues; vector<individual*> p; for(i = 0; i < adults.size(); i++){ //Sum up fitness values fitnessValues.push_back(adults[i]->getFitness()); sum += fitnessValues[i]; } for(i = 0; i < fitnessValues.size(); i++){ //Calculate probabilities fitnessValues[i] = fitnessValues[i]/sum; } p = rouletteWheel(fitnessValues, adults, count); parents.insert(parents.end(), p.begin(), p.end()); }
void selection::rankSelection(vector<individual*> &adults, vector<individual*> &parents, int count){ vector<float> rankValues; int i, size = adults.size(), r = size; float num = r-1, sum = 0.0f; vector<individual*> p; sort(adults.begin(), adults.end(), fitnessSortFunc); for(i = 0; i < size; i++){ //Calculate rank values rankValues.push_back(exp((MIN_RANKSELECT_VAL+(MAX_RANKSELECT_VAL-MIN_RANKSELECT_VAL)*(((r-1)/num))))); sum += rankValues[i]; r--; } for(i = 0; i < size; i++){ //Calculate normalized probabilities rankValues[i] = rankValues[i]/sum; } p = rouletteWheel(rankValues, adults, count); parents.insert(parents.end(), p.begin(), p.end()); }
//run the genetic algorithm initialized before with some training parameters:
//training location, training algorithm, desired error, max_epochs, epochs_between_reports
//see "FeedForwardNNTrainer" class for more details
//printtype specifies how much verbose will be the execution (PRINT_ALL,PRINT_MIN,PRINT_OFF)
//n is the number of entries in params; the first 5 are forwarded to the trainer below
void GAFeedForwardNN::evolve(const int n, const float * params, const int printtype){
    if(n<5){printf("TOO FEW PARAMETERS FOR TRAINING\n");exit(1);}
    //NOTE(review): variable-length arrays are a compiler extension, not standard C++
    int layers[nhidlayers+2];          //layer sizes: input + evolved hidden layers + output
    int functs[nhidlayers+2];          //activation function id per layer (decoded per individual)
    float learningRate;
    float fitnesses[popsize];
    float totfitness=0;
    float bestFitnessEver=0;
    FloatChromosome newpop[popsize];   //scratch buffer for the next generation
    //input/output layer sizes are fixed by the training set; only hidden sizes evolve
    layers[0]=trainingSet->getNumOfInputsPerInstance();
    layers[nhidlayers+1]=trainingSet->getNumOfOutputsPerInstance();
    //for each generation
    for(int gen=0;gen<generations;gen++){
        float bestFitnessGeneration=0;
        int bestFitGenIndex=0;
        totfitness=0;
        printf("GENERATION NUMBER:\t%d\n\n",gen);
        //fitness evaluation of each individual
        for(int i=0;i<popsize;i++){
            printf("\nINDIVIDUAL N:\t%d\n",i);
            //decode the chromosome hidden layers sizes (genes 0 .. nhidlayers-1)
            for(int j=0;j<nhidlayers;j++){
                layers[j+1]=chromosomes[i].getElement(j);
            }
            //decode the chromosome activation functions for each layer
            //(genes nhidlayers .. 2*nhidlayers+1)
            for(int j=0;j<nhidlayers+2;j++){
                functs[j]=chromosomes[i].getElement(j+nhidlayers);
            }
            //decode the chromosome learning rate (gene after the size and activation genes)
            learningRate=chromosomes[i].getElement(nhidlayers+nhidlayers+2);
            float medium=0;     //MSE averaged over the evaluation runs below
            FeedForwardNN mseT; //filled by the trainer with its best-on-test-set network
            //do a number of evaluations with different weights and average the results
            //NOTE(review): this loop variable shadows the parameter n (the params count)
            for(int n=0;n<numberofevaluations;n++){
                //choose what to print based on user's choice
                //(PRINT_MIN shows only the first evaluation of each individual)
                int print=PRINT_ALL;
                if(printtype==PRINT_MIN){
                    if(n==0)
                        print=PRINT_MIN;
                    else
                        print=PRINT_OFF;
                }
                if(printtype==PRINT_OFF)
                    print=PRINT_OFF;
                //decode the chromosome into a real network
                FeedForwardNN net(nhidlayers+2,layers,functs);
                FeedForwardNNTrainer trainer;
                trainer.selectTrainingSet(*trainingSet);
                if(testSet!=NULL){
                    trainer.selectTestSet(*testSet);
                }
                trainer.selectNet(net);
                trainer.selectBestMSETestNet(mseT);
                //first 5 user params, then the evolved learning rate and fixed
                //momentum/shuffle/error-function settings
                float par[]={params[0],params[1],params[2],params[3],params[4],learningRate,0,SHUFFLE_ON,ERROR_LINEAR};
                //do the training of the net and evaluate its MSE error
                medium+=trainer.train(9,par,print)/float(numberofevaluations);
            }
            //the fitness is computed as the inverse of the MSE
            fitnesses[i]=1.0f/medium;
            printf("FITNESS:\t%.2f\n\n",fitnesses[i]);
            //updates the best individual of the generation
            if(fitnesses[i]>bestFitnessGeneration){bestFitnessGeneration=fitnesses[i];bestFitGenIndex=i;}
            //if this is the best fitness ever it stores the network in bestNet
            //NOTE(review): mseT holds the best-on-test net of the LAST evaluation run
            //only — confirm that is the intended network when numberofevaluations > 1
            if(bestNet!=NULL)
                if(fitnesses[i]>bestFitnessEver){*bestNet=mseT;bestFitnessEver=fitnesses[i];}
            totfitness+=fitnesses[i];
        }
        //the best individual is always carried to the next generation (elitism)
        newpop[0]=chromosomes[bestFitGenIndex];
        //generate the new population
        for(int i=1;i<popsize;i++){
            //selection
            int firstmate=0,secondmate=0;
            //first mate
            switch(selectionalgorithm){
                case ROULETTE_WHEEL:
                    firstmate=rouletteWheel(popsize,fitnesses);
                    break;
                case TOURNAMENT_SELECTION:
                    firstmate=tournament(popsize,fitnesses,popsize/5+1);
                    break;
                default:
                    printf("SELECTION ALGORITHM NOT IMPLEMENTED YET\n");exit(1);break;
            }
            //second mate: re-draw until it differs from the first
            do{
                switch(selectionalgorithm){
                    case ROULETTE_WHEEL:
                        secondmate=rouletteWheel(popsize,fitnesses);
                        break;
                    case TOURNAMENT_SELECTION:
                        secondmate=tournament(popsize,fitnesses,popsize/5+1);
                        break;
                    default:
                        printf("SELECTION ALGORITHM NOT IMPLEMENTED YET\n");exit(1);break;
                }
            }while(firstmate==secondmate);
            FloatChromosome child;
            //do the crossover
            child=crossover(chromosomes[firstmate],chromosomes[secondmate],pcross);
            //and the mutation
            child=mutation(child,pmut,maxdimhiddenlayers,nhidlayers);
            //and put the child in the new generation
            newpop[i]=child;
        }
        //copy the new generation over the older one, which is the one we will still use
        for(int i=0;i<popsize;i++){
            chromosomes[i]=newpop[i];
        }
    }
}