std::vector<std::pair<population::size_type,std::vector<population::individual_type>::size_type> > random_r_policy::select(const std::vector<population::individual_type> &immigrants, const population &dest) const { const population::size_type rate_limit = std::min<population::size_type>(get_n_individuals(dest),boost::numeric_cast<population::size_type>(immigrants.size())); // Temporary vectors to store sorted indices of the populations. std::vector<population::size_type> immigrants_idx(boost::numeric_cast<std::vector<population::size_type>::size_type>(immigrants.size())); std::vector<population::size_type> dest_idx(boost::numeric_cast<std::vector<population::size_type>::size_type>(dest.size())); // Fill in the arrays of indices. iota(immigrants_idx.begin(),immigrants_idx.end(),population::size_type(0)); iota(dest_idx.begin(),dest_idx.end(),population::size_type(0)); // Permute the indices (immigrants). for (population::size_type i = 0; i < rate_limit; ++i) { population::size_type next_idx = i + (m_urng() % (rate_limit - i)); if (next_idx != i) { std::swap(immigrants_idx[i], immigrants_idx[next_idx]); } } // Permute the indices (destination). for (population::size_type i = 0; i < rate_limit; ++i) { population::size_type next_idx = i + (m_urng() % (dest.size() - i)); if (next_idx != i) { std::swap(dest_idx[i], dest_idx[next_idx]); } } // Return value. std::vector<std::pair<population::size_type,std::vector<population::individual_type>::size_type> > retval; for (population::size_type i = 0; i < rate_limit; ++i) { retval.push_back(std::make_pair(dest_idx[i],immigrants_idx[i])); } return retval; }
std::vector<population::individual_type> best_s_policy::select(const population &pop) const { const population::size_type migration_rate = get_n_individuals(pop); // Create a temporary array of individuals. std::vector<population::individual_type> result(pop.begin(),pop.end()); // Sort the individuals (best go first). std::sort(result.begin(),result.end(),dom_comp(pop)); // Leave only desired number of elements in the result. result.erase(result.begin() + migration_rate,result.end()); return result; }
/// Select the best individuals and reinitialise them in the source population.
/**
 * The selected individuals are returned to the caller, while their slots in pop
 * are re-randomised ("killed"). The champion of pop still retains the
 * information about the best individual seen so far.
 *
 * @param pop source population (modified).
 * @return copies of the migration_rate best individuals.
 */
std::vector<population::individual_type> best_kill_s_policy::select(population &pop) const
{
	pagmo_assert(get_n_individuals(pop) <= pop.size());
	// Number of individuals to migrate.
	const population::size_type n_migrants = get_n_individuals(pop);
	// Indices of the best individuals in the population.
	const std::vector<population::size_type> best_idx = pop.get_best_idx(n_migrants);
	// Copy the selected individuals into the result.
	std::vector<population::individual_type> result;
	result.reserve(n_migrants);
	for (population::size_type j = 0; j < n_migrants; ++j) {
		result.push_back(pop.get_individual(best_idx[j]));
	}
	// Reinitialise the migrated individuals in the source population.
	for (population::size_type j = 0; j < n_migrants; ++j) {
		pop.reinit(best_idx[j]);
	}
	return result;
}
// Selection implementation. std::vector<std::pair<population::size_type,std::vector<population::individual_type>::size_type> > worst_r_policy::select(const std::vector<population::individual_type> &immigrants, const population &dest) const { const population::size_type rate_limit = std::min<population::size_type>(get_n_individuals(dest),boost::numeric_cast<population::size_type>(immigrants.size())); // Temporary vectors to store sorted indices of the populations. std::vector<population::size_type> immigrants_idx(boost::numeric_cast<std::vector<population::size_type>::size_type>(immigrants.size())); std::vector<population::size_type> dest_idx(boost::numeric_cast<std::vector<population::size_type>::size_type>(dest.size())); // Fill in the arrays of indices. iota(immigrants_idx.begin(),immigrants_idx.end(),population::size_type(0)); iota(dest_idx.begin(),dest_idx.end(),population::size_type(0)); // Sort the arrays of indices. // From best to worst. std::sort(immigrants_idx.begin(),immigrants_idx.end(),indirect_individual_sorter<std::vector<population::individual_type> >(immigrants,dest)); // From worst to best. std::sort(dest_idx.begin(),dest_idx.end(),indirect_individual_sorter<population>(dest,dest)); std::reverse(dest_idx.begin(),dest_idx.end()); // Create the result. std::vector<std::pair<population::size_type,std::vector<population::individual_type>::size_type> > result; for (population::size_type i = 0; i < rate_limit; ++i) { // Similar to fair policy, but replace unconditionally, without checking if the incoming individuals are better. result.push_back(std::make_pair(dest_idx[i],immigrants_idx[i])); } return result; }
// Selection implementation.
/// Hypervolume-based fair replacement.
/**
 * Builds an augmented population (destination + de-duplicated immigrants), peels
 * Pareto fronts from the worst one upwards, discarding the least-contributing
 * points (by exclusive hypervolume) until the augmented population is back to the
 * original size, then matches the best surviving immigrants with the worst
 * discarded islanders. An exchange only happens while the immigrant's exclusive
 * hypervolume is at least that of the islander it replaces ("fair").
 *
 * @param immigrants candidate incoming individuals.
 * @param dest destination population.
 * @return (destination index, original immigrant index) pairs to perform.
 */
std::vector<std::pair<population::size_type,std::vector<population::individual_type>::size_type> > hv_fair_r_policy::select(const std::vector<population::individual_type> &immigrants, const population &dest) const
{
	// Fall back to fair_r_policy when facing a single-objective problem.
	if (dest.problem().get_f_dimension() == 1) {
		return fair_r_policy(m_rate, m_type).select(immigrants, dest);
	}
	std::vector<population::individual_type> filtered_immigrants;
	filtered_immigrants.reserve(immigrants.size());
	// Keeps information on the original indexing of immigrants after we filter out the duplicates,
	// so that the returned pairs refer to positions in the caller's immigrants vector.
	std::vector<unsigned int> original_immigrant_indices;
	original_immigrant_indices.reserve(immigrants.size());
	// Remove the duplicates from the set of immigrants: an immigrant whose decision
	// vector exactly matches any islander's is dropped.
	// NOTE(review): const_cast is only used to obtain a non-const iterator; the
	// immigrants vector is never modified through it.
	std::vector<population::individual_type>::iterator im_it = (const_cast<std::vector<population::individual_type> &>(immigrants)).begin();
	unsigned int im_idx = 0;
	for( ; im_it != immigrants.end() ; ++im_it) {
		decision_vector im_x((*im_it).cur_x);
		bool equal = true;
		for ( unsigned int idx = 0 ; idx < dest.size() ; ++idx ) {
			decision_vector isl_x(dest.get_individual(idx).cur_x);
			// Component-wise exact comparison of decision vectors.
			equal = true;
			for (unsigned int d_idx = 0 ; d_idx < im_x.size() ; ++d_idx) {
				if (im_x[d_idx] != isl_x[d_idx]) {
					equal = false;
					break;
				}
			}
			if (equal) {
				break;
			}
		}
		if (!equal) {
			filtered_immigrants.push_back(*im_it);
			original_immigrant_indices.push_back(im_idx);
		}
		++im_idx;
	}
	// Computes the number of immigrants to be selected (accounting for the destination pop size)
	const population::size_type rate_limit = std::min<population::size_type>(get_n_individuals(dest), boost::numeric_cast<population::size_type>(filtered_immigrants.size()));
	// Defines the retvalue
	std::vector<std::pair<population::size_type, std::vector<population::individual_type>::size_type> > result;
	// Skip the remaining computation if there's nothing to do
	if (rate_limit == 0) {
		return result;
	}
	// Makes a copy of the destination population
	population pop_copy(dest);
	// Merge the immigrants to the copy of the destination population.
	// Immigrants therefore occupy indices [dest.size(), pop_copy.size()) below.
	for (population::size_type i = 0; i < rate_limit; ++i) {
		pop_copy.push_back(filtered_immigrants[i].cur_x);
	}
	// Population fronts stored as indices of individuals.
	std::vector< std::vector<population::size_type> > fronts_i = pop_copy.compute_pareto_fronts();
	// Population fronts stored as fitness vectors of individuals.
	std::vector< std::vector<fitness_vector> > fronts_f (fronts_i.size());
	// Nadir point is established manually later, first point is a first "safe" candidate.
	fitness_vector refpoint(pop_copy.get_individual(0).cur_f);
	// Fill fronts_f with fitness vectors and establish the nadir point
	for (unsigned int f_idx = 0 ; f_idx < fronts_i.size() ; ++f_idx) {
		fronts_f[f_idx].resize(fronts_i[f_idx].size());
		for (unsigned int p_idx = 0 ; p_idx < fronts_i[f_idx].size() ; ++p_idx) {
			fronts_f[f_idx][p_idx] = fitness_vector(pop_copy.get_individual(fronts_i[f_idx][p_idx]).cur_f);
			// Update the nadir point manually for efficiency (component-wise maximum).
			for (unsigned int d_idx = 0 ; d_idx < fronts_f[f_idx][p_idx].size() ; ++d_idx) {
				refpoint[d_idx] = std::max(refpoint[d_idx], fronts_f[f_idx][p_idx][d_idx]);
			}
		}
	}
	// Epsilon is added to nadir point so that every point dominates the reference
	// point strictly and contributes a positive hypervolume.
	for (unsigned int d_idx = 0 ; d_idx < refpoint.size() ; ++d_idx) {
		refpoint[d_idx] += m_nadir_eps;
	}
	// Vector for maintaining the original indices of points for augmented population as 0 and 1
	// (1 = still in the augmented population, 0 = discarded below).
	std::vector<unsigned int> g_orig_indices(pop_copy.size(), 1);
	unsigned int no_discarded_immigrants = 0;
	// Store which front we process (start with the last front) and the number of processed individuals.
	unsigned int front_idx = fronts_i.size(); // front_idx is equal to the size, since it's decremented right in the main loop
	unsigned int processed_individuals = 0;
	// Pairs of (islander index, islander exclusive hypervolume)
	// Second item is updated later
	std::vector<std::pair<unsigned int, double> > discarded_islanders;
	std::vector<std::pair<unsigned int, double> > point_pairs;
	// index of currently processed point in the point_pair vector.
	// Initiated to its size (=0) in order to enforce the initial computation on penultimate front.
	unsigned int current_point = point_pairs.size();
	// Stops when we reduce the augmented population to the size of the original population or when the number of discarded islanders reaches the limit
	while (processed_individuals < filtered_immigrants.size() && discarded_islanders.size() < rate_limit) {
		// if current front was exhausted, load next one (moving towards better fronts)
		if (current_point == point_pairs.size()) {
			--front_idx;
			// Compute contributions
			std::vector<double> c;
			// If there exist a dominated front for front at index front_idx,
			// merge it in so the contributions account for the dominated points.
			if (front_idx + 1 < fronts_f.size()) {
				std::vector<fitness_vector> merged_front;
				// Reserve the memory and copy the fronts
				merged_front.reserve(fronts_f[front_idx].size() + fronts_f[front_idx + 1].size());
				copy(fronts_f[front_idx].begin(), fronts_f[front_idx].end(), back_inserter(merged_front));
				copy(fronts_f[front_idx + 1].begin(), fronts_f[front_idx +1].end(), back_inserter(merged_front));
				hypervolume hv(merged_front, false);
				c = hv.contributions(refpoint);
			} else {
				hypervolume hv(fronts_f[front_idx], false);
				c = hv.contributions(refpoint);
			}
			// Initiate the pairs and sort by second item (exclusive volume),
			// so the least-contributing points are discarded first.
			point_pairs.resize(fronts_f[front_idx].size());
			for(unsigned int i = 0 ; i < fronts_f[front_idx].size() ; ++i) {
				point_pairs[i] = std::make_pair(i, c[i]);
			}
			current_point = 0;
			std::sort(point_pairs.begin(), point_pairs.end(), sort_point_pairs_asc);
		}
		// Index of the least contributor within the augmented population.
		unsigned int orig_lc_idx = fronts_i[front_idx][point_pairs[current_point].first];
		// Indices below dest.size() are islanders; the rest are immigrants.
		if (orig_lc_idx < dest.size()) {
			discarded_islanders.push_back(std::make_pair(orig_lc_idx, 0.0));
		} else {
			++no_discarded_immigrants;
		}
		// Flag given individual as discarded
		g_orig_indices[orig_lc_idx] = 0;
		++processed_individuals;
		++current_point;
	}
	// Number of non-discarded immigrants
	unsigned int no_available_immigrants = boost::numeric_cast<unsigned int>(filtered_immigrants.size() - no_discarded_immigrants);
	// Pairs of (immigrant index, immigrant exclusive hypervolume)
	// Second item is updated later
	std::vector<std::pair<unsigned int, double> > available_immigrants;
	available_immigrants.reserve(no_available_immigrants);
	for(unsigned int idx = dest.size() ; idx < pop_copy.size() ; ++idx) {
		// If the immigrant was not discarded add it to the available set
		if ( g_orig_indices[idx] == 1 ) {
			available_immigrants.push_back(std::make_pair(idx, 0.0));
		}
	}
	// Aggregate all points to establish the hypervolume contribution of available immigrants and discarded islanders
	std::vector<fitness_vector> merged_fronts;
	merged_fronts.reserve(pop_copy.size());
	for(unsigned int idx = 0 ; idx < pop_copy.size() ; ++idx) {
		merged_fronts.push_back(pop_copy.get_individual(idx).cur_f);
	}
	hypervolume hv(merged_fronts, false);
	std::vector<std::pair<unsigned int, double> >::iterator it;
	for(it = available_immigrants.begin() ; it != available_immigrants.end() ; ++it) {
		(*it).second = hv.exclusive((*it).first, refpoint);
	}
	for(it = discarded_islanders.begin() ; it != discarded_islanders.end() ; ++it) {
		(*it).second = hv.exclusive((*it).first, refpoint);
	}
	// Sort islanders and immigrants according to exclusive hypervolume
	// (ascending — presumably ind_cmp orders by the second pair member; confirm against its definition).
	sort(available_immigrants.begin(), available_immigrants.end(), hv_fair_r_policy::ind_cmp);
	sort(discarded_islanders.begin(), discarded_islanders.end(), hv_fair_r_policy::ind_cmp);
	// Number of exchanges is the minimum of the number of non discarded immigrants and the number of discarded islanders
	unsigned int no_exchanges = std::min(boost::numeric_cast<unsigned int>(available_immigrants.size()), boost::numeric_cast<unsigned int>(discarded_islanders.size()));
	it = available_immigrants.begin();
	std::vector<std::pair<unsigned int, double> >::reverse_iterator r_it = discarded_islanders.rbegin();
	// Match the best immigrant (forward iterator) with the worst islander (reverse iterator) no_exchanges times.
	for(unsigned int i = 0 ; i < no_exchanges ; ++i) {
		// Break if any islander is better than an immigrant ("fair" stopping rule)
		if ((*r_it).second > (*it).second) {
			break;
		}
		// Push the pair (islander_idx, fixed_immigrant_idx) to the results.
		// The immigrant index is shifted back from pop_copy space and remapped
		// through original_immigrant_indices to the caller's immigrants vector.
		result.push_back(std::make_pair((*r_it).first, original_immigrant_indices[(*it).first - dest.size()]));
		++r_it;
		++it;
	}
	return result;
}
/// Hypervolume-greedy selection.
/**
 * Selects migration_rate individuals front by front: whole fronts are taken while
 * they fit, and the last (partial) front is reduced greedily by repeatedly picking
 * the greatest hypervolume contributor.
 *
 * @param pop source population (non-const: required by the interface; this method
 *            only reads from it as visible here).
 * @return copies of the selected individuals.
 */
std::vector<population::individual_type> hv_greedy_s_policy::select(population &pop) const
{
	// Fall back to best_s_policy when facing a single-objective problem.
	if (pop.problem().get_f_dimension() == 1) {
		return best_s_policy(m_rate, m_type).select(pop);
	}
	pagmo_assert(get_n_individuals(pop) <= pop.size());
	// Gets the number of individuals to select
	const population::size_type migration_rate = get_n_individuals(pop);
	// Create a temporary array of individuals.
	std::vector<population::individual_type> result;
	// Indices of fronts.
	std::vector< std::vector< population::size_type> > fronts_i = pop.compute_pareto_fronts();
	// Fitness vectors of individuals according to the indices above.
	std::vector< std::vector< fitness_vector> > fronts_f (fronts_i.size());
	// Nadir point is established manually later, first point is as a first "safe" candidate.
	fitness_vector refpoint(pop.get_individual(0).cur_f);
	// Fill fronts_f and compute the nadir point as the component-wise maximum.
	for (unsigned int f_idx = 0 ; f_idx < fronts_i.size() ; ++f_idx) {
		fronts_f[f_idx].resize(fronts_i[f_idx].size());
		for (unsigned int p_idx = 0 ; p_idx < fronts_i[f_idx].size() ; ++p_idx) {
			fronts_f[f_idx][p_idx] = fitness_vector(pop.get_individual(fronts_i[f_idx][p_idx]).cur_f);
			// Update the nadir point manually for efficiency.
			for (unsigned int d_idx = 0 ; d_idx < fronts_f[f_idx][p_idx].size() ; ++d_idx) {
				refpoint[d_idx] = std::max(refpoint[d_idx], fronts_f[f_idx][p_idx][d_idx]);
			}
		}
	}
	// Epsilon is added to nadir point so every point contributes positive hypervolume.
	for (unsigned int d_idx = 0 ; d_idx < refpoint.size() ; ++d_idx) {
		refpoint[d_idx] += m_nadir_eps;
	}
	// Store which front we process (start with front 0) and the number of processed individuals.
	unsigned int front_idx = 0;
	unsigned int processed_individuals = 0;
	// Vector for maintaining the original indices of points.
	// NOTE: it is initialized only once because the else-branch below, once entered,
	// keeps being taken for the same front until migration_rate is reached (the front
	// shrinks by one for each remaining slot), so it never carries over between fronts.
	std::vector<unsigned int> orig_indices;
	while (processed_individuals < migration_rate) {
		// If we need to pull every point from given front anyway, just push back the individuals right away
		if (fronts_f[front_idx].size() <= (migration_rate - processed_individuals)) {
			for(unsigned int i = 0 ; i < fronts_i[front_idx].size() ; ++i) {
				result.push_back(pop.get_individual(fronts_i[front_idx][i]));
			}
			processed_individuals += fronts_f[front_idx].size();
			++front_idx;
		} else {
			// Prepare the vector for the original indices
			if (orig_indices.size() == 0) {
				orig_indices.resize(fronts_i[front_idx].size());
				iota(orig_indices.begin(), orig_indices.end(), 0);
			}
			// Compute the greatest contributor of the (shrinking) front.
			hypervolume hv(fronts_f[front_idx], false);
			hv.set_copy_points(false);
			unsigned int gc_idx = hv.greatest_contributor(refpoint);
			// orig_indices maps positions in the shrunken fronts_f back to fronts_i,
			// which is never erased from.
			result.push_back(pop.get_individual(fronts_i[front_idx][orig_indices[gc_idx]]));
			// Remove it from the front along with its index
			orig_indices.erase(orig_indices.begin() + gc_idx);
			fronts_f[front_idx].erase(fronts_f[front_idx].begin() + gc_idx);
			++processed_individuals;
		}
	}
	return result;
}