void GeneticAlgorithm::initPopulation()
{
//#ifdef PARALLEL
//  int argc = 0;
//  char **argv = NULL;
//  int myid;      // id of this machine
//  int numprocs;  // number of processors
//  int namelen;   // name of the particular processor
//  char processor_name[MPI_MAX_PROCESSOR_NAME];
//  MPI_Init(&argc, &argv);                           // gain access to the MPI environment
//  MPI_Comm_size(MPI_COMM_WORLD, &numprocs);         // get the number of processors
//  MPI_Comm_rank(MPI_COMM_WORLD, &myid);             // get the id of this processor
//  MPI_Get_processor_name(processor_name, &namelen); // get the processor name
//  if(myid == 0) {
//#endif
  std::vector<Individual>::iterator it;
  for(it = _population.begin(); it != _population.end(); ++it) {
    it->init();
    it->calculateFitness(_cp);
  }
  calculateStatistics();
//#ifdef PARALLEL
//  }
//  MPI_Finalize(); // mandatory MPI shutdown
//#endif
}
Caller::Caller( const double poissonLambda, const int minDepth, const double leftStrandBias,
                const double rightStrandBias, const double readEndFraction, const int qCutoff,
                const char* tumorDirectoryPath, const char* outputDirectoryPath,
                const int usePoissonGermline, const int disableLvl5Filter)
{
  this->poissonLambda = poissonLambda;
  this->minDepth = minDepth;
  this->strandBiasLeft = leftStrandBias;
  this->strandBiasRight = rightStrandBias;
  this->minQScore = qCutoff;
  this->readEndFraction = readEndFraction;
  this->usePoissonGermline = usePoissonGermline;
  this->disableLvl5Filter = disableLvl5Filter;
  ( this->tumorDirectoryPath).assign( tumorDirectoryPath);
  ( this->outputDirectoryPath).assign( outputDirectoryPath);

  // Setup output files
  ( this->outputPaths).push_back( this->outputDirectoryPath + "/calls_level1.sinvict");
  ( this->outputPaths).push_back( this->outputDirectoryPath + "/calls_level2.sinvict");
  ( this->outputPaths).push_back( this->outputDirectoryPath + "/calls_level3.sinvict");
  ( this->outputPaths).push_back( this->outputDirectoryPath + "/calls_level4.sinvict");
  ( this->outputPaths).push_back( this->outputDirectoryPath + "/calls_level5.sinvict");
  ( this->outputPaths).push_back( this->outputDirectoryPath + "/calls_level6.sinvict");

  Common::getFilesInDir( this->tumorDirectoryPath, tumorPaths);
  std::sort( tumorPaths.begin(), tumorPaths.end());
  for( int i = 0; i < tumorPaths.size(); i++)
  {
    std::string nextTumorSamplePath = this->tumorDirectoryPath + "/" + tumorPaths[i];
    loadEntries( nextTumorSamplePath);
  }
  calculateStatistics();
}
LGraph DTALikelihood::calculateStatisticsDirect(Particles &particles)
{
  QVector<QVector3D> qp;
  particles.appendToQVector3DList(qp);
  LGraph graph;
  calculateStatistics(qp, graph);
  return graph;
}
void GeneticAlgorithm::nextGeneration()
{
  std::vector<Individual>::iterator it;
  for(it = _population.begin(); it != _population.end(); ++it) {
    it->_genotype.mutate(_mutationProbability);
    it->calculateFitness(_cp);
  }
  calculateStatistics();
}
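The two GeneticAlgorithm snippets above call a parameterless calculateStatistics() whose body is not shown. The following is a minimal sketch of what such a member might compute over the population's fitness values; Individual::getFitness() and the _bestFitness / _averageFitness members are assumptions for illustration, not taken from the original project.

// Hypothetical sketch, not the original project's implementation.
// Assumes Individual exposes getFitness() and that the class keeps
// _bestFitness / _averageFitness members for reporting.
void GeneticAlgorithm::calculateStatistics()
{
  if (_population.empty()) {
    return;
  }
  double sum  = 0.0;
  double best = _population.front().getFitness();
  for (std::vector<Individual>::const_iterator it = _population.begin();
       it != _population.end(); ++it) {
    double fitness = it->getFitness();
    sum += fitness;
    if (fitness > best) {
      best = fitness;
    }
  }
  _bestFitness    = best;
  _averageFitness = sum / static_cast<double>(_population.size());
}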
void ElevationProfileFloatItem::toggleZoomToViewport()
{
  m_zoomToViewport = ! m_zoomToViewport;
  calculateStatistics( m_eleData );
  if ( ! m_zoomToViewport ) {
    m_axisX.setRange( m_eleData.first().x(), m_eleData.last().x() );
    m_axisY.setRange( qMin( m_minElevation, qreal( 0.0 ) ), m_maxElevation );
  }
  readSettings();
  emit settingsChanged( nameId() );
}
void ElevationProfileFloatItem::handleDataUpdate(const GeoDataLineString &points, QList<QPointF> eleData)
{
  m_eleData = eleData;
  m_points = points;
  calculateStatistics( m_eleData );
  if ( m_eleData.length() >= 2 ) {
    m_axisX.setRange( m_eleData.first().x(), m_eleData.last().x() );
    m_axisY.setRange( qMin( m_minElevation, qreal( 0.0 ) ), m_maxElevation );
  }
  emit dataUpdated();
}
void ElevationProfileFloatItem::updateVisiblePoints()
{
  if ( ! m_activeDataSource->isDataAvailable() || m_points.size() < 2 ) {
    return;
  }

  // find the longest visible route section on screen
  QList<QList<int> > routeSegments;
  QList<int> currentRouteSegment;
  for ( int i = 0; i < m_eleData.count(); i++ ) {
    qreal lon = m_points[i].longitude(GeoDataCoordinates::Degree);
    qreal lat = m_points[i].latitude (GeoDataCoordinates::Degree);
    qreal x = 0;
    qreal y = 0;
    if ( m_marbleWidget->screenCoordinates(lon, lat, x, y) ) {
      // on screen --> add point to list
      currentRouteSegment.append(i);
    } else {
      // off screen --> start new list
      if ( !currentRouteSegment.isEmpty() ) {
        routeSegments.append( currentRouteSegment );
        currentRouteSegment.clear();
      }
    }
  }
  routeSegments.append( currentRouteSegment ); // in case the route ends on screen

  int maxLength = 0;
  foreach ( const QList<int> &currentRouteSegment, routeSegments ) {
    if ( currentRouteSegment.size() > maxLength ) {
      maxLength = currentRouteSegment.size();
      m_firstVisiblePoint = currentRouteSegment.first();
      m_lastVisiblePoint  = currentRouteSegment.last();
    }
  }
  if ( m_firstVisiblePoint < 0 ) {
    m_firstVisiblePoint = 0;
  }
  if ( m_lastVisiblePoint < 0 || m_lastVisiblePoint >= m_eleData.count() ) {
    m_lastVisiblePoint = m_eleData.count() - 1;
  }

  // TODO: move the range setting into calculateStatistics() and test for m_zoomToViewport there
  if ( m_zoomToViewport ) {
    calculateStatistics( m_eleData );
    m_axisX.setRange( m_eleData.value( m_firstVisiblePoint ).x(),
                      m_eleData.value( m_lastVisiblePoint ).x() );
    m_axisY.setRange( m_minElevation, m_maxElevation );
  }
}
void DTALikelihood::calculateModel(Model *model)
{
  m_modelParticles.clear();
  m_originalParticles->calculateBoundingBox();
  model->start();
  for (Particle* pos : m_originalParticles->getParticles()) {
    if (!model->isInVoid(pos->getPos())) {
      m_modelParticles.append(pos->getPos());
    }
  }
  model->stop();
  calculateStatistics(m_modelParticles, m_modelData);
}
const SimulationResults& Simulation::run()
{
  //Stat* statistics = new Stat();
  //statistics->setVerbose(true); // FIXME: remove!
  //statistics->setSchedulerId(scheduler->getId());

  LOG(0) << "######## Running Simulation with scheduler " << scheduler->getId();
  LOG(0) << "\tAiming for " << steps << " iterations";
  initCounters();

  for (size_t taskNum = 0; taskNum < (*taskset).size(); taskNum++) {
    (*taskset)[taskNum]->start(0);
  }

  unsigned int time = 0;
  bool schedSuccess = true;
  for (time = 0; time < steps; time++) {
    LOG(1) << "\n\nT : " << time;
    doActivations(time, scheduler);
    schedSuccess = doExecutions(time, scheduler/*, statistics*/);
    if (!schedSuccess) {
      LOG(0) << "Executions failed in regular time step " << time;
      break;
    }
  }

  if (schedSuccess) {
    while (scheduler->hasPendingJobs()) {
      bool bdisp = doExecutions(time, scheduler/*, statistics*/);
      ++time;
      if (!bdisp) {
        LOG(0) << "Executions failed during cleanup in step " << time;
        break;
      }
    }
  }

  LOG(0) << "Total simulated time: " << time;
  printExecStats(scheduler->getId());
  printDelayCounters();

  // Save the statistics in another object
  //storeStatistics(statistics);
  calculateStatistics();
  stats.simulatedTime = time;
  stats.success = schedSuccess;
  return stats;
}
bool MuSlashRhoLambdaES::startEvolutionRun( int mu, int rho, int lambda, ESInitialize *init)
{
  deletePopulation();

  // set information struct to values
  mInformation.currentGeneration = 0;
  mInformation.mu = mu;
  mInformation.rho = rho;
  mInformation.lambda = lambda;
  mInformation.numberOfLastSuccessfulIndividuals = 0;

  if(mInformation.mu <= 0 || mInformation.rho <= 0 || mInformation.lambda <= 0){
    return false;
  }

  mPopulation = init->createPopulation(mInformation.mu);
  if(mPopulation.size() <= 0){
    return false;
  }

  mInformation.numberOfStrategyParameters = mPopulation.first()->getStrategyParameters().size();
  mInformation.numberOfObjectParameters = mPopulation.first()->getObjectParameters().size();

  // calculate fitness values of population
  bool ok = mFitnessEvaluation->doEvaluation(mPopulation, mInformation);
  if(ok == false){
    return false;
  }

  qSort(mPopulation.begin(), mPopulation.end(), MuSlashRhoLambdaES::descendingOrder);

  calculateStatistics();

  return true;
}
std::vector<Genome> GenAlg::new_generation(std::vector<Genome> &last_generation)
{
  generation_count++;
  reset();
  std::sort(last_generation.begin(), last_generation.end(), myComparer);
  calculateStatistics();

  std::vector<Genome> new_population;

  // get elites
  GetBest(my_params.num_elite, my_params.num_copies_of_elite, last_generation, new_population);

  // genetic algorithm loop
  while(new_population.size() < pop_size) {
    Genome mother = getRandomChromosome();
    Genome father = getRandomChromosome();
    std::vector<float> child1, child2;
    crossover(mother.weights, father.weights, child1, child2);
    mutate(child1);
    mutate(child2);
    new_population.push_back(Genome(child1, 0));
    new_population.push_back(Genome(child2, 0));
  }

  population = new_population;
  return population;
}
double ClassifierTester::StartTest(const LandmarkCollectionDataPtr& collection,
                                   const std::string& savedTrainedDataFilePath,
                                   const int numberOfIterations, bool showSteps)
{
  BOOST_ASSERT(savedTrainedDataFilePath.size() != 0);
  BOOST_ASSERT(collection->CollectionSize() != 0);

  m_collection = collection;
  m_savedTrainedDataFilePath = savedTrainedDataFilePath;
  m_showSteps = showSteps;
  m_numberOfIterations = numberOfIterations;
  m_detector = DetectorPtr(new Detector(m_savedTrainedDataFilePath));

  int i = 0;
  collection->EnumerateConstColectionWithCallback([&] (const ImageLandmarkDataPtr& landmarkData, const int index, bool* stop)
  {
    // std::cout << index << std::endl;
    const cv::Mat image = landmarkData->ImageSource();
    const cv::Mat landmarks = processImage(image, landmarkData->LandmarksMat());
    if (landmarks.empty())
    {
      i++;
      // std::cout << "Fail image " << landmarkData->ImagePath() << std::endl;
    }
    else
    {
      collectStatistics(landmarks, landmarkData->LandmarksMat());
    }
  });

  std::cout << "Number of failed images: " << i << std::endl;
  return calculateStatistics();
}
std::shared_ptr<te::mem::DataSet> terrama2::services::analysis::core::createAggregationBuffer(
    std::vector<uint32_t>& indexes, std::shared_ptr<ContextDataSeries> contextDataSeries,
    Buffer buffer, StatisticOperation aggregationStatisticOperation,
    const std::string& attribute)
{
  std::shared_ptr<te::mem::DataSet> dsOut;
  if(indexes.empty())
    return dsOut;

  // Creates a memory dataset for the buffer
  te::da::DataSetType* dt = new te::da::DataSetType("buffer");

  auto syncDs = contextDataSeries->series.syncDataSet;
  auto sampleGeom = syncDs->getGeometry(0, contextDataSeries->geometryPos);
  int geomSampleSrid = sampleGeom->getSRID();

  te::gm::GeometryProperty* prop = new te::gm::GeometryProperty("geom", 0, te::gm::MultiPolygonType, true);
  prop->setSRID(geomSampleSrid);
  dt->add(prop);

  te::dt::SimpleProperty* prop02 = new te::dt::SimpleProperty("attribute", te::dt::DOUBLE_TYPE, true);
  dt->add(prop02);

  dsOut.reset(new te::mem::DataSet(dt));

  std::shared_ptr<te::gm::Envelope> box(syncDs->getExtent(contextDataSeries->geometryPos));

  if(buffer.unit.empty())
    buffer.unit = "m";

  // Inserts each geometry into the rtree; if there is a conflict, the two geometries are merged
  te::sam::rtree::Index<OccurrenceAggregation*, 4> rtree;
  for(size_t i = 0; i < indexes.size(); ++i)
  {
    auto geom = syncDs->getGeometry(indexes[i], contextDataSeries->geometryPos);
    double distance = terrama2::core::convertDistanceUnit(buffer.distance, buffer.unit, "METER");

    std::unique_ptr<te::gm::Geometry> tempGeom(dynamic_cast<te::gm::Geometry*>(geom.get()->clone()));
    if(!tempGeom)
    {
      QString errMsg(QObject::tr("Invalid geometry in dataset: ").arg(contextDataSeries->series.dataSet->id));
      throw terrama2::InvalidArgumentException() << ErrorDescription(errMsg);
    }

    int utmSrid = terrama2::core::getUTMSrid(tempGeom.get());

    // Converts to UTM in order to create the buffer in meters
    if(tempGeom->getSRID() != utmSrid)
    {
      tempGeom->transform(utmSrid);
    }

    std::unique_ptr<te::gm::Geometry> aggBuffer(tempGeom->buffer(distance, 16, te::gm::CapButtType));
    aggBuffer->setSRID(utmSrid);

    // Converts the buffer to the DataSet SRID in order to compare with the occurrences in the rtree
    aggBuffer->transform(geomSampleSrid);

    std::vector<OccurrenceAggregation*> vec;
    bool aggregated = false;

    // Search for occurrences in the same area
    rtree.search(*(aggBuffer->getMBR()), vec);
    for(std::size_t t = 0; t < vec.size(); ++t)
    {
      OccurrenceAggregation* occurrenceAggregation = vec[t];

      // If an intersection is found, makes the union of the two geometries and marks the index
      if(aggBuffer->intersects(occurrenceAggregation->buffer.get()))
      {
        rtree.remove(*(occurrenceAggregation->buffer->getMBR()), occurrenceAggregation);
        occurrenceAggregation->buffer.reset(aggBuffer->Union(occurrenceAggregation->buffer.get()));
        occurrenceAggregation->indexes.push_back(i);
        rtree.insert(*(occurrenceAggregation->buffer->getMBR()), occurrenceAggregation);
        aggregated = true;
      }
    }

    if(!aggregated)
    {
      OccurrenceAggregation* occurrenceAggregation = new OccurrenceAggregation();
      occurrenceAggregation->buffer.reset(aggBuffer.release());
      occurrenceAggregation->indexes.push_back(i);
      rtree.insert(*(occurrenceAggregation->buffer->getMBR()), occurrenceAggregation);
    }
  }

  // Fills the memory dataset with the geometries
  std::vector<OccurrenceAggregation*> occurrenceAggVec;
  rtree.search(*(box.get()), occurrenceAggVec);

  int attributeType = -1;
  if(aggregationStatisticOperation != StatisticOperation::COUNT)
  {
    auto property = contextDataSeries->series.teDataSetType->getProperty(attribute);
    if(!property)
    {
      QString errMsg(QObject::tr("Invalid attribute: %1").arg(QString::fromStdString(attribute)));
      throw terrama2::InvalidArgumentException() << ErrorDescription(errMsg);
    }
    attributeType = property->getType();
  }

  for(size_t i = 0; i < occurrenceAggVec.size(); i++)
  {
    OccurrenceAggregation* occurrenceAggregation = occurrenceAggVec[i];

    OperatorCache cache;
    std::vector<double> values;
    values.reserve(occurrenceAggregation->indexes.size());
    for(unsigned int j = 0; j < occurrenceAggregation->indexes.size(); ++j)
    {
      cache.count++;

      if(aggregationStatisticOperation != StatisticOperation::COUNT)
      {
        if(attribute.empty())
        {
          QString errMsg(QObject::tr("Invalid attribute"));
          throw terrama2::InvalidArgumentException() << ErrorDescription(errMsg);
        }

        double value = getValue(syncDs, attribute, occurrenceAggregation->indexes[j], attributeType);
        values.push_back(value);
        cache.sum += value;
        if(value > cache.max)
          cache.max = value;
        if(value < cache.min)
          cache.min = value;
      }
    }

    calculateStatistics(values, cache);

    auto item = new te::mem::DataSetItem(dsOut.get());
    item->setGeometry(0, dynamic_cast<te::gm::Geometry*>(occurrenceAggregation->buffer->clone()));
    item->setDouble(1, getOperationResult(cache, aggregationStatisticOperation));
    dsOut->add(item);
  }

  return dsOut;
}
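createAggregationBuffer() hands the accumulated values and the partially filled OperatorCache to calculateStatistics(values, cache), which is not shown here. The sketch below illustrates one plausible implementation; the mean, median and standardDeviation fields on OccurrenceAggregation's OperatorCache are assumptions inferred from the surrounding aggregation operations, and the real TerraMA2 helper may differ.

#include <algorithm>
#include <cmath>
#include <vector>

// Sketch only: OperatorCache is assumed to already carry count, sum, min and max
// (filled by the caller above) plus mean, median and standardDeviation fields.
void calculateStatistics(std::vector<double>& values, OperatorCache& cache)
{
  if(values.empty())
    return;

  std::sort(values.begin(), values.end());

  cache.mean = cache.sum / values.size();

  std::size_t mid = values.size() / 2;
  cache.median = (values.size() % 2 == 0)
                     ? (values[mid - 1] + values[mid]) / 2.0
                     : values[mid];

  double sumSquaredDiff = 0.0;
  for(double v : values)
    sumSquaredDiff += (v - cache.mean) * (v - cache.mean);
  cache.standardDeviation = std::sqrt(sumSquaredDiff / values.size());
}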
double iAIDA::AIDA_Histogram_native::AIDA_Histogram2D::equivalentBinEntries() const
{
  calculateStatistics();
  return m_ebe;
}
double iAIDA::AIDA_Histogram_native::AIDA_Histogram2D::sumExtraBinHeights() const
{
  calculateStatistics();
  return m_sumExtraBinHeights;
}
void ExperimentReport::generateLatexTables()
{
  latexDirectory_ = experimentBaseDirectory_ + "/" + latexDirectory_;
  cout << "latex directory: " << latexDirectory_ << endl;

  vector<double> *** data = new vector<double>**[indicatorList_.size()];
  for (int indicator = 0; indicator < indicatorList_.size(); indicator++)
  {
    // A data vector per problem
    data[indicator] = new vector<double>*[problemList_.size()];
    for (int problem = 0; problem < problemList_.size(); problem++)
    {
      data[indicator][problem] = new vector<double>[algorithmNameList_.size()];
      for (int algorithm = 0; algorithm < algorithmNameList_.size(); algorithm++)
      {
        string directory = experimentBaseDirectory_;
        directory += "/data/";
        directory += "/" + algorithmNameList_[algorithm];
        directory += "/" + problemList_[problem];
        directory += "/" + indicatorList_[indicator];

        // Read values from data files
        std::ifstream in(directory.c_str());
        if( !in )
        {
          cout << "Error trying to read quality indicator file: " << directory << endl;
          exit(-1);
        } // if

        string aux;
        while( getline(in, aux) )
        {
          data[indicator][problem][algorithm].push_back(atof(aux.c_str()));
        } // while
      } // for
    } // for
  } // for

  double *** mean;
  double *** median;
  double *** stdDeviation;
  double *** iqr;
  double *** max;
  double *** min;
  int *** numberOfValues;

  map<string, double> statValues;
  statValues["mean"] = 0.0;
  statValues["median"] = 0.0;
  statValues["stdDeviation"] = 0.0;
  statValues["iqr"] = 0.0;
  statValues["max"] = 0.0;
  statValues["min"] = 0.0;

  mean = new double**[indicatorList_.size()];
  median = new double**[indicatorList_.size()];
  stdDeviation = new double**[indicatorList_.size()];
  iqr = new double**[indicatorList_.size()];
  min = new double**[indicatorList_.size()];
  max = new double**[indicatorList_.size()];
  numberOfValues = new int**[indicatorList_.size()];

  for (int indicator = 0; indicator < indicatorList_.size(); indicator++)
  {
    // A data vector per problem
    mean[indicator] = new double*[problemList_.size()];
    median[indicator] = new double*[problemList_.size()];
    stdDeviation[indicator] = new double*[problemList_.size()];
    iqr[indicator] = new double*[problemList_.size()];
    min[indicator] = new double*[problemList_.size()];
    max[indicator] = new double*[problemList_.size()];
    numberOfValues[indicator] = new int*[problemList_.size()];

    for (int problem = 0; problem < problemList_.size(); problem++)
    {
      mean[indicator][problem] = new double[algorithmNameList_.size()];
      median[indicator][problem] = new double[algorithmNameList_.size()];
      stdDeviation[indicator][problem] = new double[algorithmNameList_.size()];
      iqr[indicator][problem] = new double[algorithmNameList_.size()];
      min[indicator][problem] = new double[algorithmNameList_.size()];
      max[indicator][problem] = new double[algorithmNameList_.size()];
      numberOfValues[indicator][problem] = new int[algorithmNameList_.size()];

      for (int algorithm = 0; algorithm < algorithmNameList_.size(); algorithm++)
      {
        sort(data[indicator][problem][algorithm].begin(), data[indicator][problem][algorithm].end());

        string directory = experimentBaseDirectory_;
        directory += "/" + algorithmNameList_[algorithm];
        directory += "/" + problemList_[problem];
        directory += "/" + indicatorList_[indicator];
        //cout << "----" << directory << "-----" << endl;

        //calculateStatistics(data[indicator][problem][algorithm], meanV, medianV, minV, maxV, stdDeviationV, iqrV);
        calculateStatistics(data[indicator][problem][algorithm], &statValues);
        /*
        cout << "Mean: " << statValues["mean"] << endl;
        cout << "Median : " << statValues["median"] << endl;
        cout << "Std : " << statValues["stdDeviation"] << endl;
        cout << "IQR : " << statValues["iqr"] << endl;
        cout << "Min : " << statValues["min"] << endl;
        cout << "Max : " << statValues["max"] << endl;
        cout << "N_values: " << data[indicator][problem][algorithm].size() << endl;
        */

        mean[indicator][problem][algorithm] = statValues["mean"];
        median[indicator][problem][algorithm] = statValues["median"];
        stdDeviation[indicator][problem][algorithm] = statValues["stdDeviation"];
        iqr[indicator][problem][algorithm] = statValues["iqr"];
        min[indicator][problem][algorithm] = statValues["min"];
        max[indicator][problem][algorithm] = statValues["max"];
        numberOfValues[indicator][problem][algorithm] = data[indicator][problem][algorithm].size();
      }
    }
  }

  if (FileUtils::existsPath(latexDirectory_.c_str()) != 1)
  {
    if (FileUtils::createDirectory(latexDirectory_) == -1)
    {
      cout << "Error creating directory: " << latexDirectory_ << endl;
      exit(-1);
    } // if
    cout << "Creating " << latexDirectory_ << " directory" << endl;
  } // if

  cout << "Experiment name: " << experimentName_ << endl;
  string latexFile = latexDirectory_ + "/" + experimentName_ + ".tex";
  printHeaderLatexCommands(latexFile);
  for (int i = 0; i < indicatorList_.size(); i++)
  {
    printMeanStdDev(latexFile, i, mean, stdDeviation);
    printMedianIQR(latexFile, i, median, iqr);
  } // for
  printEndLatexCommands(latexFile);

  // Free memory:
  for (int indicator = 0; indicator < indicatorList_.size(); indicator++)
  {
    for (int problem = 0; problem < problemList_.size(); problem++)
    {
      delete [] data[indicator][problem];
      delete [] mean[indicator][problem];
      delete [] median[indicator][problem];
      delete [] stdDeviation[indicator][problem];
      delete [] iqr[indicator][problem];
      delete [] max[indicator][problem];
      delete [] min[indicator][problem];
      delete [] numberOfValues[indicator][problem];
    }
    delete [] data[indicator];
    delete [] mean[indicator];
    delete [] median[indicator];
    delete [] stdDeviation[indicator];
    delete [] iqr[indicator];
    delete [] max[indicator];
    delete [] min[indicator];
    delete [] numberOfValues[indicator];
  }
  delete [] data;
  delete [] mean;
  delete [] median;
  delete [] stdDeviation;
  delete [] iqr;
  delete [] max;
  delete [] min;
  delete [] numberOfValues;
} // generateLatexTables
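generateLatexTables() sorts each data vector and then calls calculateStatistics(data[...], &statValues), expecting the map to be filled with "mean", "median", "stdDeviation", "iqr", "min" and "max". The sketch below only illustrates that expected contract; it assumes the input is already sorted ascending and uses simple index-based quartiles, and is not the original implementation.

#include <cmath>   // sqrt
#include <map>
#include <string>
#include <vector>

// Sketch only: assumes `values` is already sorted, as guaranteed by the sort()
// call in generateLatexTables() above.
void calculateStatistics(const std::vector<double>& values, std::map<std::string, double>* statValues)
{
  if (values.empty())
    return;

  size_t n = values.size();

  double sum = 0.0;
  for (size_t i = 0; i < n; i++)
    sum += values[i];
  double mean = sum / n;

  double sqDiff = 0.0;
  for (size_t i = 0; i < n; i++)
    sqDiff += (values[i] - mean) * (values[i] - mean);

  double median = (n % 2 == 0) ? (values[n / 2 - 1] + values[n / 2]) / 2.0 : values[n / 2];
  // crude index-based quartiles; real implementations may interpolate
  double q1 = values[n / 4];
  double q3 = values[(3 * n) / 4];

  (*statValues)["mean"] = mean;
  (*statValues)["median"] = median;
  (*statValues)["stdDeviation"] = std::sqrt(sqDiff / n);
  (*statValues)["iqr"] = q3 - q1;
  (*statValues)["min"] = values.front();
  (*statValues)["max"] = values.back();
}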
double iAIDA::AIDA_Histogram_native::AIDA_Histogram2D::maxBinHeight() const
{
  calculateStatistics();
  return m_maxHeight;
}
double iAIDA::AIDA_Histogram_native::AIDA_Histogram2D::meanX() const
{
  calculateStatistics();
  return m_meanX;
}
int main()
{
  bool endSim = false;
  Randomize();

  while (true) {
    double invalidElectResult = 0;
    int voters;
    double spread;
    double votingError;
    int trialRuns;
    Vector<int> elections;

    while (true) {
      cout << "Enter number of voters (or 0 to end simulation): ";
      voters = GetInteger();
      if (voters == 0) {
        endSim = true;
        break;
      } else if (voters < 0)
        cout << "Enter a positive integer." << endl;
      else
        break;
    }
    if (endSim) break;

    while (true) {
      cout << "Enter percentage spread between candidates (0 - 1.0): ";
      spread = GetReal();
      if (spread >= 0 && spread <= 1.0) break;
      else cout << "Spread must be between 0 and 1.0" << endl;
    }

    while (true) {
      cout << "Enter voting error percentage: (0 - 1.0): ";
      votingError = GetReal();
      if (votingError >= 0 && votingError <= 1.0) break;
      else cout << "Voting error must be between 0 and 1.0" << endl;
    }

    while (true) {
      cout << "Enter number of times to run the trial: ";
      trialRuns = GetInteger();
      if (trialRuns >= 1) break;
      else cout << "Enter a positive integer" << endl;
    }

    for (int i = 0; i < trialRuns; i++) {
      invalidElectResult = RunElectionTrial(voters, spread, votingError);
      cout << "Trial " << i + 1 << ": ";
      cout << "Chance of an invalid election result after 500 trials = " << invalidElectResult << "%" << endl;
      elections.add(invalidElectResult);
    }

    scoreStatsT scores = calculateStatistics(elections);
    cout << "Scores: MIN = " << scores.min << endl;
    cout << "Scores: MAX = " << scores.max << endl;
    cout << "Scores: AVG = " << scores.avg << endl;
    cout << "Scores: NUM = " << scores.num << endl;
    cout << "********************************************************" << endl << endl;
  }
  return 0;
}
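The election simulator prints fields from a scoreStatsT returned by calculateStatistics(elections), but neither the struct nor the function appears above. A plausible minimal version, inferred only from the fields that main() prints (min, max, avg, num) and from the Stanford-library Vector interface, could look like this:

// Hypothetical sketch, inferred from the usage in main(), not the original source.
struct scoreStatsT {
  int min;
  int max;
  double avg;
  int num;
};

scoreStatsT calculateStatistics(Vector<int> &elections) {
  scoreStatsT scores;
  scores.num = elections.size();
  scores.min = elections[0];   // main() guarantees at least one trial was run
  scores.max = elections[0];
  int total = 0;
  for (int i = 0; i < elections.size(); i++) {
    if (elections[i] < scores.min) scores.min = elections[i];
    if (elections[i] > scores.max) scores.max = elections[i];
    total += elections[i];
  }
  scores.avg = double(total) / scores.num;
  return scores;
}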
double iAIDA::AIDA_Histogram_native::AIDA_Histogram2D::rmsY() const
{
  calculateStatistics();
  return m_rmsY;
}
bool MuSlashRhoLambdaES::doNextGeneration()
{
  /*
  QFile file ("evolution.log");
  file.open(QIODevice::WriteOnly | QIODevice::Append);
  QTextStream out(&file);
  out << "\n\n--------------------------\nNew generation:\n";
  */

  bool ok = true;
  QList<ESIndividual*> offsprings;
  QList<double> bestParentsFitness;

  // generate offsprings
  for(int i = 0; i < mInformation.lambda; i++)
  {
    // select parents which should be used to generate the current offspring
    QList<ESIndividual*> curParents = mMarriage->doMarriage(getPopulation(), mInformation.rho, mInformation);

    // check if an error occurred
    if(curParents.size() <= 0){
      return false;
    }

    // get fitness of the best parent
    qSort(curParents.begin(), curParents.end(), MuSlashRhoLambdaES::descendingOrder);
    bestParentsFitness.append(curParents.first()->getFitness());

    /*
    out << "\tParents:\n";
    for(int parentIndex = 0; parentIndex < curParents.size(); parentIndex++) {
      out << "\t\t" << curParents.at(parentIndex)->toString() << "\n";
    }
    */

    // recombine offspring strategy parameters from parents
    QList<OptimizationDouble> recombinedStratParas =
        mStrategyParametersRecombination->doRecombination(curParents, ESRecombination::STRATEGY, mInformation);

    // check if an error occurred
    if(recombinedStratParas.size() <= 0){
      return false;
    }

    // recombine offspring object parameters from parents
    QList<OptimizationDouble> recombinedObjParas =
        mObjectParametersRecombination->doRecombination(curParents, ESRecombination::OBJECT, mInformation);

    // check if an error occurred
    if(recombinedObjParas.size() <= 0){
      return false;
    }

    // mutate the offspring strategy parameters
    QList<OptimizationDouble> mutatedStratParas =
        mStrategyParametersMutation->doMutation(recombinedStratParas, mInformation);

    // check if an error occurred
    if(mutatedStratParas.size() <= 0){
      return false;
    }

    // mutate the offspring object parameters with the help of its mutated strategy parameters
    QList<OptimizationDouble> mutatedObjParas =
        mObjectParametersMutation->doMutation(recombinedObjParas, mutatedStratParas, mInformation);

    // check if an error occurred
    if(mutatedObjParas.size() <= 0){
      return false;
    }

    // create a new individual and add it to the offspring list
    offsprings.append( new ESIndividual(mutatedObjParas, mutatedStratParas, 0.0) );

    /*
    out << "\t--> Offspring:\n";
    out << "\t\t" << offsprings.last()->toString();
    out << "\n\n";
    */
  }

  // calculate fitness values of offsprings
  ok &= mFitnessEvaluation->doEvaluation(offsprings, mInformation);
  if(ok == false){
    return false;
  }

  // count offsprings which have a better fitness than their best parent
  mInformation.numberOfLastSuccessfulIndividuals = 0;
  for(int i = 0; i < offsprings.size(); i++) {
    if(offsprings.at(i)->getFitness() > bestParentsFitness.at(i)){
      mInformation.numberOfLastSuccessfulIndividuals++;
    }
  }
  bestParentsFitness.clear();

  // sort offsprings according to their fitness
  qSort(offsprings.begin(), offsprings.end(), MuSlashRhoLambdaES::descendingOrder);

  // get list with the new population
  QList<ESIndividual*> selectionList = mSelection->doSelection(mPopulation, offsprings, mInformation.mu, mInformation);

  // check if an error occurred
  if(selectionList.size() <= 0){
    return false;
  }

  // delete old parents which are not in the selection
  deletePopulation(selectionList);

  // use the selection as the new population
  mPopulation = selectionList;

  // update statistic values in mInformation
  calculateStatistics();

  mInformation.currentGeneration++;

  /*
  file.flush();
  file.close();
  */

  return ok;
}
const Column::ColumnStatistics& Column::statistics()
{
  if (!statisticsAvailable())
    calculateStatistics();

  return m_column_private->statistics;
}
int iAIDA::AIDA_Histogram_native::AIDA_Histogram2D::extraEntries() const
{
  calculateStatistics();
  return m_extraEntries;
}