Example #1
//-----------------------------ctor---------------------------------------
//
//------------------------------------------------------------------------
CController::CController(int cxClient,
                         int cyClient):
                                       m_bSuccess(false),                                  
                                       m_vPadPos(SVector2D(RandFloat()*cxClient, 50)),
                                       m_cxClient(cxClient),
                                       m_cyClient(cyClient)
                                       
{


  //create a starting position for the landers
  SVector2D vStartPos = SVector2D(WINDOW_WIDTH/2, cyClient-50);

  //create the user controlled lander
  m_pUserLander = new CLander(cxClient, cyClient, PI, vStartPos, m_vPadPos);

  //set up the VB for the landing pad
  for (int i=0; i<NumPadVerts; ++i)
  {
    m_vecPadVB.push_back(Pad[i]);
  }

  //setup the stars
  for (int i=0; i<NumStars; ++i)
  {
    m_vecStarVB.push_back(SPoint(RandInt(0, cxClient), RandInt(100, cyClient)));
  }

}
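All of the examples on this page assume small random-number helpers such as RandInt and RandFloat that each project defines for itself, so the exact signature varies from example to example: some call a two-argument inclusive-range form, others pass a single upper bound or no arguments at all. For reference, here is a minimal sketch of the two-argument form used by most of the examples below; the implementation (and the rand()-based seeding it implies) is an assumption for illustration, not code taken from any of these projects.

#include <cstdlib>

//assumed helper: returns a random integer in the inclusive range [x, y]
inline int RandInt(int x, int y)
{
  return rand() % (y - x + 1) + x;
}

//assumed helper: returns a random float in the range [0, 1)
inline float RandFloat()
{
  return static_cast<float>(rand() / (RAND_MAX + 1.0));
}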
Example #2
dsr::Entity dsr::Entity::buildEntity(unsigned long int size, int cardinality) {

  dsr::Entity e;


  // If cardinality is -1 do random integers
  if (cardinality == -1) {
    for (unsigned i = 0; i < size; ++i) {
      e.add(RandInt());
    }
  }
  else {
    std::vector<unsigned> ms;
    // Create a block of mentions for each item in the cardinality
    // Evenly spread out the randoms for now
    unsigned block = size / cardinality; // (Assume size > cardinality)
    for (unsigned i = 0; i < cardinality; ++i) {
      auto m = RandInt();
      for (auto b = 0; b < block; ++b) {
        ms.push_back(m);
      }
    }

    std::random_shuffle(ms.begin(), ms.end());
    for(auto &m: ms) {
      e.add(m);
    }
  }

  return e;
}
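Note that std::random_shuffle, used in the example above, was deprecated in C++14 and removed in C++17. If this code is built against a newer standard, the shuffle can be swapped for std::shuffle with an explicit engine; a minimal sketch under that assumption (the helper name ShuffleMentions is made up for illustration):

#include <algorithm>
#include <random>
#include <vector>

//C++17-friendly replacement for the std::random_shuffle call above
void ShuffleMentions(std::vector<unsigned> &ms)
{
  static std::mt19937 rng(std::random_device{}());
  std::shuffle(ms.begin(), ms.end(), rng);
}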
Example #3
NEURAL_NETWORK *OPTIMIZER::Genome_Get_Random_But_Not(NEURAL_NETWORK *other) {

	// Return a random genome, but don't choose one that is equal
	// to genome `other'.

	int genomeIndex = RandInt(0,AFPO_POP_SIZE-1);
	int numberOfTries = 0;

	while (	(genomes[genomeIndex]==other) ||
		(genomes[genomeIndex]->fitness == 0.0) ||
		(genomes[genomeIndex]->fitness == other->fitness) ) {

		genomeIndex = RandInt(0,AFPO_POP_SIZE-1);

		numberOfTries++;

		// If no appropriate controller can be found,
		// return a random one.

		if ( numberOfTries >= 10000 )
			return( genomes[genomeIndex] );
	}

	return( genomes[genomeIndex] );
}
Example #4
int main()
{
    const unsigned int SAMPLE_NUM = 1000;

    // Define the training input and output matrices
    LPerceptronMatrix X(SAMPLE_NUM, 2);
    LPerceptronMatrix Y(SAMPLE_NUM, 1);
    for (unsigned int i = 0; i < SAMPLE_NUM; i++)
    {
        X[i][0] = (float)RandInt(-1000, 1000);
        X[i][1] = (float)RandInt(-1000, 1000);
        Y[i][0] = ((X[i][0] + X[i][1]) >= 0)? LPERCEPTRON_SUN : LPERCEPTRON_MOON;
    }

    LPerceptronProblem problem(X, Y);

    // Train the model
    LPerceptron perceptron;
    perceptron.TrainModel(problem);

    // Test with a test sample
    LPerceptronMatrix testSample(1, 2);
    testSample[0][0] = -50.0f;
    testSample[0][1] = 0.0f;

    printf("Predict: %f\n", perceptron.Predict(testSample));

    system("pause");
    return 0;
}
Example #5
xor::xor(void)
{
	fitness = 0;
	posX = RandInt(0, dimMapa/dimCasilla)*dimCasilla;
	posY = RandInt(0, dimMapa/dimCasilla)*dimCasilla;
	crearListaObjetos();
}
Example #6
NEURAL_NETWORK *OPTIMIZER::Genome_Get_Random_But_Not(int numControllers, NEURAL_NETWORK **controllers) {

	// Return a random genome, but make sure that it doesn't achieve
	// the same fitness as any in the external list of controllers
	// supplied as a parameter.

	int genomeIndex = RandInt(0,AFPO_POP_SIZE-1);

	int found = false; 

	int numberOfTries = 0;

	while ( !found  ) {

		// The genome has not yet been evaluated.

		if ( genomes[genomeIndex]->fitness == 0.0 )

			genomeIndex = RandInt(0,AFPO_POP_SIZE-1);

		else {
			int otherIndex = 0;
			int equal = false;

			while ( (otherIndex<numControllers) && (!equal) ) {
	
				if ( 	(genomes[genomeIndex]==controllers[otherIndex]) ||
					(genomes[genomeIndex]->fitness==controllers[otherIndex]->fitness) )

					equal = true;
				else
					otherIndex++;
			}

			if ( equal ) {

				// The controller is evaluated, but it's equal to
				// another controller in the external list.

				genomeIndex = RandInt(0,AFPO_POP_SIZE-1);
			}
			else {
				// The controller is evaluated, and it's unique.
				found = true;
			}
		}

		numberOfTries++;

		// If no appropriate controllers can be found,
		// return a random one that has at least been evaluated.

		if ( numberOfTries >= 10000 ) {

			return( Genome_Get_Random_But_Not(controllers[0]) );
		}
	}

	return( genomes[genomeIndex] );
}
Example #7
//------------------------MutateSM--------------------------------
//
//	chooses a random start and end point, then scrambles the genes
//	between them
//----------------------------------------------------------------
void CgaTSP::MutateSM(vector<int> &chromo)
{
	//return dependent upon mutation rate
	if (RandFloat() > m_dMutationRate) return;

	//first we choose a section of the chromosome
	const int MinSpanSize = 3;

	//these will hold the beginning and end points of the span
  int beg, end;

	ChooseSection(beg, end, chromo.size()-1, MinSpanSize);

	int span = end - beg;

	//now we just swap randomly chosen genes within the beg/end
	//range a few times to scramble them
	int NumberOfSwapsRqd = span;

	while(--NumberOfSwapsRqd)
	{
		vector<int>::iterator gene1 = chromo.begin();
		vector<int>::iterator gene2 = chromo.begin();

		//choose two loci within the range
		advance(gene1, beg + RandInt(0, span));
		advance(gene2, beg + RandInt(0, span));

		//exchange them
		swap(*gene1, *gene2);
		
	}//repeat
}
Example #8
SGenome geneticselection::tournamentSelection(vector<SGenome> &m_vecPop, double m_totalfitness){

	//Retrieve the size of the population
	int m_vecSize = m_vecPop.size();

	//Randomly select the initial best value
	//REASON: makes selecting every other part of the set easier
	SGenome best = m_vecPop[RandInt(0,m_vecSize-1)];

	//Holder for future individuals in the tournament
	SGenome ind;
	//Run a tournament utilizing 1 to n members of the population
	for (int i = 0; i < CParams::dTournamentNumber - 1; ++i){

		//Randomly select a member of the population
		ind = m_vecPop[RandInt(0, m_vecSize-1)];

		if (ind.dFitness > best.dFitness){
			//The new individual is better than the initial individual
			best = ind;
		}

	}

	//Tournament has run its course
	//return the best option selected
	return best;
}
Example #9
/**
*   Helper function built to make training a network easier. It uses predefined stopping
* criteria: the goal is to stop training once the network's mean squared error over the
* samples stops decreasing. It takes a parameter giving a minimum number of training
* iterations, after which the variation of the mean squared error starts being checked,
* and the number of training iterations to run between successive error measurements.
* When the (relative) variance of the last n measurements is less than or equal to a
* given threshold (between 0 and 1), training stops.
*   The function also takes a set of samples (input matrix / output matrix), the number of
* samples contained in the matrices, the dimension of each input and output sample, and a
* flag indicating whether the samples should be presented in random order or sequentially.
*/
int BKPNeuralNet::AutoTrain( float**inMatrix, float **outMatrix, int inSize, int outSize, int nSamples, 
              int minTrains, int varVectorSize, float minStdDev, int numTrains, TrainType type, 
              float l_rate, float momentum, int* retExecutedTrains )
{
  // Early-exit cases:
  if( (!inMatrix) || (!outMatrix) || (inSize!=_nLayers[0]) || (_nLayers[_layers-1]!=outSize) )
    return -1;

  // The initial training count must be at least 0:
  if( *retExecutedTrains < 0 )
    *retExecutedTrains = 0;

  int thisSample = -1;    //< Helper variable: index of the sample to be trained.
  // Run the mandatory training iterations:
  for( int i=0 ; i<minTrains ; i++ )
  {
    if( type == ORDERED_TRAIN )
      thisSample = (thisSample + 1)%nSamples;
    if( type == RANDOM_TRAIN )
      thisSample = RandInt(0, (nSamples-1));
    Train( inSize, inMatrix[thisSample], outSize, outMatrix[thisSample], l_rate, momentum );
  }

  // Run the remaining training iterations:
  float* varVector = new float[varVectorSize];  //< Vector holding the most recent error measurements.
  int ptVarVector = 0;              //< Points to the first empty position of varVector.
  float lastVariance = (float)MAX_VALUE;   //< Holds the current variance value.
  float StdDev = (float)MAX_VALUE;   //< Holds the current standard deviation value.
  thisSample = -1;
  int nTrains=minTrains + *retExecutedTrains;  //< Tracks the number of training iterations executed.
  bool varFlag = false;
  while( StdDev > minStdDev )
  {
    if( type == ORDERED_TRAIN )
      thisSample = (thisSample + 1)%nSamples;
    if( type == RANDOM_TRAIN )
      thisSample = RandInt(0, (nSamples-1));
    Train( inSize, inMatrix[thisSample], outSize, outMatrix[thisSample], l_rate, momentum );
    if( (nTrains%numTrains) == 0 ) //< Every numTrains iterations, measure the error:
    {
      float retRMS_Error = 0;
      float mean = 0;
      RMS_error( inMatrix, outMatrix, inSize, outSize, nSamples, &retRMS_Error );
      varFlag = ShiftLeft( varVector, varVectorSize, retRMS_Error, ptVarVector );
      if( varFlag == true )
      {
        lastVariance = Variance( varVector, varVectorSize, &mean );
        StdDev = ((float)sqrt(lastVariance))/mean;
      }
      ptVarVector++;
    }
    nTrains++;
    if( nTrains >= 90000 )   //< Hard cap on the total number of training iterations.
      StdDev = minStdDev;

  }
  delete[] varVector;
  *retExecutedTrains = nTrains;
  return 0;
}
Example #10
void MATRIX::Perturb(double maxVal) {

	int i, j;
		
	i = RandInt(0,length-1);
	j = RandInt(0,width-1);

	Set(i,j,Rand(0.0,maxVal));
}
Example #11
//-----------------------------------constructor-------------------------
//
//-----------------------------------------------------------------------
CDiscMinesweeper::CDiscMinesweeper():
							 CMinesweeper(),
                             m_dRotation((ROTATION_DIRECTION)RandInt(0,3))
{
	//create a random start position
	
	m_vPosition = SVector2D<int>(RandInt(0,CParams::WindowWidth/CParams::iGridCellDim)*CParams::iGridCellDim, 
					             RandInt(0,CParams::WindowHeight/CParams::iGridCellDim)*CParams::iGridCellDim);
}
Example #12
LLGL::ColorRGBAub RandColorRGBA()
{
    return LLGL::ColorRGBAub
    {
        static_cast<std::uint8_t>(RandInt(255)),
        static_cast<std::uint8_t>(RandInt(255)),
        static_cast<std::uint8_t>(RandInt(255)),
        static_cast<std::uint8_t>(RandInt(255))
    };
}
Example #13
 void RandomSparseMatrix(SparseMatrix& A,int nnz,Real range)
 {
   A.setZero();
   for(int k=0;k<nnz;k++) {
     int i=RandInt(A.m);
     int j=RandInt(A.n);
     Real x=Rand(-range,range);
     A.insertEntry(i,j,x);
   }
 }
Example #14
NEURAL_NETWORK *OPTIMIZER::Genome_Get_Random(void) {

	int genomeIndex = RandInt(0,AFPO_POP_SIZE-1);

	while ( genomes[genomeIndex]->fitness == 0.0 )

		genomeIndex = RandInt(0,AFPO_POP_SIZE-1);

	return( genomes[genomeIndex] );
}
Example #15
void   NEURAL_NETWORK::Connection_Remove(int nodeIndex) {

	int i = RandInt(0,((NODES_PER_SENSOR*numSensors)+numNodes)-1);

	while ( weights->Get(i,nodeIndex) == 0 )

		i = RandInt(0,((NODES_PER_SENSOR*numSensors)+numNodes)-1);

	weights->Set(i,nodeIndex,0);
}
Example #16
bool xor::estoyEnObjeto(){
	for (int i = 0; i < itemsX.size(); i++){
		if(itemsX[i] == posX && itemsY[i] == posY)
		{
			itemsX[i] = RandInt(0, dimMapa/dimCasilla)*dimCasilla;
			itemsY[i] = RandInt(0, dimMapa/dimCasilla)*dimCasilla;
			return true;
		}
	}
	return false;
}
Example #17
//-------------------------CrossoverPMX---------------------------------
//
// crossover operator based on 'partially matched crossover' as 
// defined in the text
//-------------------------------------------------------------------
void CgaTSP::CrossoverPMX(	const vector<int>	&mum, 
							              const vector<int>	&dad, 
							              vector<int>			&baby1, 
							              vector<int>			&baby2)
{
	baby1 = mum;
	baby2 = dad;
	
	//just return dependent on the crossover rate or if the
	//chromosomes are the same.
	if ( (RandFloat() > m_dCrossoverRate) || (mum == dad)) 
	{
		return;
	}

	//first we choose a section of the chromosome
	int beg = RandInt(0, mum.size()-2);
	
	int end = beg;
	
	//find an end
	while (end <= beg)
	{
		end = RandInt(0, mum.size()-1);
	}

	//now we iterate through the matched pairs of genes from beg
	//to end swapping the places in each child
	vector<int>::iterator posGene1, posGene2;

	for (int pos = beg; pos < end+1; ++pos)
	{
		//these are the genes we want to swap
		int gene1 = mum[pos];
		int gene2 = dad[pos];

		if (gene1 != gene2)
		{
			//find and swap them in baby1
			posGene1 = find(baby1.begin(), baby1.end(), gene1);
			posGene2 = find(baby1.begin(), baby1.end(), gene2);

			swap(*posGene1, *posGene2);

			//and in baby2
			posGene1 = find(baby2.begin(), baby2.end(), gene1);
			posGene2 = find(baby2.begin(), baby2.end(), gene2);
			
			swap(*posGene1, *posGene2);
		}
		
	}//next pair
}	
Example #18
void   NEURAL_NETWORK::Connection_Add(int nodeIndex) {

	int i = RandInt(0,((NODES_PER_SENSOR*numSensors)+numNodes)-1);

	while ( weights->Get(i,nodeIndex) != 0 )

		i = RandInt(0,((NODES_PER_SENSOR*numSensors)+numNodes)-1);

	if ( FlipCoin() )

		weights->Set(i,nodeIndex,-1);
	else
		weights->Set(i,nodeIndex,+1);
}
Example #19
//-------------------------------------------Reset()--------------------
//
//	Resets the sweeper's position, MinesGathered and rotation
//
//----------------------------------------------------------------------
void CDiscMinesweeper::Reset()
{

	//reset the sweeper's position
	m_vPosition = SVector2D<int>(RandInt(0,CParams::WindowWidth/CParams::iGridCellDim)*CParams::iGridCellDim, 
					             RandInt(0,CParams::WindowHeight/CParams::iGridCellDim)*CParams::iGridCellDim);
	
	CMinesweeper::Reset();

	//and the rotation
	m_dRotation = (ROTATION_DIRECTION)RandInt(0,3);
	//m_dRotation = ROTATION_DIRECTION::SOUTH;
	return;
}
Example #20
void GameWorld::CreateObstacles()
{
	for (int o=0; o < AICON.NumObstacles; ++o)
	{
		bool bOverlapped = true;
	
		//keep creating tiddlywinks until we find one that doesn't overlap
		//any others. Sometimes this can get into an endless loop because the
		//obstacle has nowhere to fit. We test for this case and exit accordingly

		int NumTrys = 0; int NumAllowableTrys = 2000;

		while (bOverlapped)
		{
			NumTrys++;

			if (NumTrys > NumAllowableTrys) return;

			float scale = 0.1f;
			int radius = RandInt((int)AICON.MinObstacleRadius,  (int)AICON.MaxObstacleRadius);
			radius *= scale;
			const int border                 = 10 * scale;
			const int MinGapBetweenObstacles = 20 * scale;

			noVec3 pos(RandInt(radius+border, m_cxClient-radius-border), 0.0f,  RandInt(radius+border, m_cyClient-radius-30-border));
			WowActor* ob = new WowActor(modelname[0]);
			ob->SetID(g_database.GetNewObjectID());
			ob->SetType(0);
			
			ActorController* pACtrl = new ActorController( ob,
				this, pos, RandFloat() * noMath::TWO_PI, 
				vec3_zero, AICON.VehicleMass, AICON.MaxSteeringForce, AICON.MaxSpeed, AICON.MaxTurnRatePerSecond );
			
			pACtrl->SetBRadius(radius);
			ob->PushStateMachine(*pACtrl);
							

			if (!Overlapped(pACtrl, m_Obstacles, MinGapBetweenObstacles))
			{
				//it's not overlapped so we can add it
				m_Obstacles.push_back(pACtrl);

				GameObjectManager::Get()->AddGameObject(ob);
				GetApp()->GetActorRoot()->AddChild(ob->GetNode());

				bOverlapped = false;
			}			
		}
	}
}
Example #21
void MATRIX::InitColumn(int j, int colSum) {

	int i;

	for (int c=0;c<colSum;c++) {

		i = RandInt(0,length-1);

		while ( Get(i,j) > 0 )
			i = RandInt(0,length-1);

		Add(i,j,1);
	}
}
Example #22
unsigned long int dsr::Entity::rand() {
  // TODO need a new method if it is in the large state
  if (state == EntityState::NORMAL) {
    auto r = mentions[RandInt() % mentions.size()];
    return r;
  }
  else if (state == EntityState::COMPRESSED) {
    unsigned long int b = RandInt() % stringmap.bucket_count();
    return stringmap.begin(b)->first;
  }
  else {
    throw "Unimplemented Random function"; //TODO
  }
}
Example #23
//-------------------------MutateDM-------------------------------------
//
//	Select two random points, grab the chunk of chromosome between them 
//	and then insert it back into the chromosome in a random position 
//	displaced from the original.
//----------------------------------------------------------------------
void CgaTSP::MutateDM(vector<int> &chromo)
{
	//return dependent upon mutation rate
	if (RandFloat() > m_dMutationRate) return;

	//first we choose a section of the chromosome
	const int MinSpanSize = 3;
	
	//these will hold the beginning and end points of the span
  int beg, end;
	
	ChooseSection(beg, end, chromo.size()-1, MinSpanSize);

	//setup iterators for our beg/end points
	vector<int>::iterator SectionStart = chromo.begin() + beg;
	vector<int>::iterator SectionEnd   = chromo.begin() + end;

	//hold on to the section we are moving
	vector<int> TheSection;
	TheSection.assign(SectionStart, SectionEnd);

	//erase from current position
	chromo.erase(SectionStart, SectionEnd);

	//move an iterator to a random insertion location
	vector<int>::iterator curPos;
	curPos = chromo.begin() + RandInt(0, chromo.size()-1);

	//re-insert the section
	chromo.insert(curPos, TheSection.begin(), TheSection.end());
	
}
Example #24
void EnumKare::Crossover(const vector<int> &mum,
	const vector<int> &dad,
	vector<int> &baby1,
	vector<int> &baby2)
{
	//A random probability decides whether to perform crossover; if the two parents are identical, skip it as well
	//When not crossing over, simply copy the parents
	if (RandFloat() > m_dCrossoverRate || mum == dad)
	{
		baby1 = dad;
		baby2 = mum;
		return;
	}
	int cp = RandInt(0, m_iChromoLength - 1);
	int i;
	for ( i = 0; i < cp; i++)
	{
		baby1.push_back(mum[i]);
		baby2.push_back(dad[i]);
	}
	for ( i = cp; i < mum.size(); i++)
	{
		baby1.push_back(dad[i]);
		baby2.push_back(mum[i]);
	}
}
Example #25
//----------------------------Crossover--------------------------------
//	Takes 2 parent vectors, selects a midpoint and then swaps the ends
//	of each genome creating 2 new genomes which are stored in baby1 and
//	baby2.
//---------------------------------------------------------------------
void Cga::Crossover( const vector<int> &mum,
						const vector<int> &dad,
						vector<int>		  &baby1,
						vector<int>		  &baby2)
{
	//just return parents as offspring dependent on the rate
	//or if parents are the same
	if ( (RandFloat() > m_dCrossoverRate) || (mum == dad)) 
	{
		baby1 = mum;
		baby2 = dad;

		return;
	}
	
	//determine a crossover point
	int cp = RandInt(0, m_iChromoLength - 1);

	//swap the bits
	for (int i=0; i<cp; ++i)
	{
		baby1.push_back(mum[i]);
		baby2.push_back(dad[i]);
	}

	for (int i=cp; i<mum.size(); ++i)
	{
		baby1.push_back(dad[i]);
		baby2.push_back(mum[i]);
	}
}
Example #26
/*
   Turn a roach.
*/
void
TurnRoach(Roach *roach)
{
    if (roach->index != (roach->rp - roachPix)) return;

    if (roach->turnLeft) {
	roach->index += (RandInt(30) / 10) + 1;
	if (roach->index >= ROACH_HEADINGS)
	    roach->index -= ROACH_HEADINGS;
    }
    else {
	roach->index -= (RandInt(30) / 10) + 1;
	if (roach->index < 0)
	    roach->index += ROACH_HEADINGS;
    }
}
Example #27
void SetArrayRand(int x[], int n){
	//Randomly generate the elements of the x array
	for (int i = 0; i < n; i++){
		x[i] = RandInt(30,100);
	}
}
Example #28
/**
Crossover (exploration) of some sort
*/
void CBasicEA::crossover(const CNeuralNet & genotypeA, const CNeuralNet & genotypeB,
	CNeuralNet & offspring1, const int networkSize)
{
	assert(genotypeA.vecLayers.size() == genotypeB.vecLayers.size());
	//TODO:: roll your own 
	// Performing 1-Point crossover
	int selectedPoint = RandInt(0, networkSize);
	selectedPoint = (selectedPoint == networkSize) ? selectedPoint - 1 : selectedPoint;

	// perform crossover
	for (int layerL = 0; layerL < offspring1.vecLayers.size(); layerL++) {
		for (int neuronN = 0; neuronN < offspring1.vecLayers[layerL].vecNeurons.size(); neuronN++) {
			for (int weightW = 0; weightW < offspring1.vecLayers[layerL].vecNeurons[neuronN].vecWeights.size(); 
				weightW++, selectedPoint--) {
				if (selectedPoint  > 0) {
					offspring1.vecLayers[layerL].vecNeurons[neuronN].vecWeights[weightW] =
						genotypeB.vecLayers[layerL].vecNeurons[neuronN].vecWeights[weightW];
				} else {
					offspring1.vecLayers[layerL].vecNeurons[neuronN].vecWeights[weightW] =
						genotypeA.vecLayers[layerL].vecNeurons[neuronN].vecWeights[weightW];
				}
			}
		}
	}
}
Example #29
unsigned long IntVector::WriteRandomSet(int Min, int Max, unsigned int* Size)
{
	if(Max-Min < (int)(*Size-1))
	{
		std::cerr << "Vector too short\n";
		return RET_FAILED;
	}
	else
	{
		unsigned int s=(unsigned int)this->size();
		if(Size!=NULL) s=*Size;
		this->clear();
		std::map<int, int> m;
		for(unsigned int i=0; i<s; i++)
		{
			int r;
			while(true)
			{
				r=RandInt(Min, Max);
				if(m.find(r)==m.end())
				{
					m[r]=1;
					break;
				}
			}
			this->push_back(r);
		}
		std::sort(this->begin(), this->end());
		return RET_OK;
	}
}
Example #30
//-----------------------------ChooseSection----------------------------
//
//	given a max span size and a min span size, this will calculate a 
//  random beginning and end point within the span. Used mainly in 
//  mutation and crossover operators
//----------------------------------------------------------------------
void ChooseSection(int       &beg,
                   int       &end,
                   const int max_span,
                   const int min_span)
{
	
	beg = RandInt(0, max_span-min_span);
	
	end = beg;
	
	//find an end
	while (end <= beg)
	{
		end = RandInt(0, max_span);
	}
}