util::Clustering::Clustering(const DataVector& _data, unsigned _max)
    : m_maxClusters(_max) {
    // Copy the input samples, run k-means for every cluster count in
    // [1, m_maxClusters], score each clustering, and keep the best one
    // in `result`.
    m_data.insert(m_data.begin(), _data.begin(), _data.end());

    ClusterMap clusterQualityScores;
    for (unsigned clusterCount = 1; clusterCount <= m_maxClusters; clusterCount++) {
        clusterQualityScores[clusterCount] = _kmeans(clusterCount);
    }

    // Sort (clusterCount, clustering) pairs by quality score, best first.
    std::vector< CountClusterPair > sortedScores;
    std::copy(clusterQualityScores.begin(), clusterQualityScores.end(),
        std::back_inserter(sortedScores));
    ScoreComparator comparator;
    std::sort(sortedScores.begin(), sortedScores.end(), comparator);

    // Guard: with _max == 0 the loop above never ran, sortedScores is
    // empty, and sortedScores[0] below would be out-of-bounds.
    if (sortedScores.empty()) {
        return;
    }

    report("Scores:");
    // size_t index avoids the signed/unsigned comparison of the old `int i`.
    for (std::size_t i = 0; i < sortedScores.size(); i++) {
        report(sortedScores[i].first << " clusters: "
            << sortedScores[i].second.getScore());
    }

    report("Clustering with highest score: ");
    report(util::Indents(2) << "cluster count: " << sortedScores[0].first);
    report(util::Indents(2) << "aggregate score: " << sortedScores[0].second.getScore());
    report(util::Indents(2) << "detected clusters:");
    for (auto it = sortedScores[0].second.getClusters().begin();
            it != sortedScores[0].second.getClusters().end(); ++it) {
        const Cluster& cluster = *it;
        report(util::Indents(4) << "position = " << cluster.position
            << ", elements = " << cluster.data.size());
    }

    result = sortedScores[0].second;
}
void CodeAtlas::RandomWalkClusterer::collectResult() { typedef QHash<int, int> ClusterMap; ClusterMap cMap; for (int i = 0; i < m_vtx.size(); ++i) { int& cID = m_vtx[i].m_clusterID; if (!cMap.contains(cID)) { int nc = cMap.size(); cMap[cID] = nc; } cID = cMap[cID]; } m_nCluster = cMap.size(); }
HAAStarResult RoutePlanner::setupHierarchicalOpenList(Unit *unit, const Vec2i &target) {
    // Seed the transition-level open list with the border transitions of
    // the unit's start cluster that the unit can actually reach.
    Transitions transitions;
    Vec2i startCluster = ClusterMap::cellToCluster(unit->getPos());
    ClusterMap *clusterMap = world->getCartographer()->getClusterMap();
    clusterMap->getTransitions(startCluster, unit->getCurrField(), transitions);
    DiagonalDistance dd(target);
    nsgSearchEngine->getNeighbourFunc().setSearchCluster(startCluster);

    // Quick low-level search from the unit's position to each transition;
    // every reachable one is pushed onto the open list. Returns true if at
    // least one transition was opened.
    auto openReachableTransitions = [&]() -> bool {
        bool anyOpened = false;
        for (Transitions::iterator it = transitions.begin(); it != transitions.end(); ++it) {
            float cost = quickSearch(unit->getCurrField(), unit->getSize(),
                unit->getPos(), (*it)->nwPos);
            if (cost != numeric_limits<float>::infinity()) {
                tSearchEngine->setOpen(*it, dd((*it)->nwPos), cost);
                anyOpened = true;
            }
        }
        return anyOpened;
    };

    // First pass: with the unit's local obstacles annotated on the master map.
    AnnotatedMap *aMap = world->getCartographer()->getMasterMap();
    aMap->annotateLocal(unit);
    bool startTrap = !openReachableTransitions();
    aMap->clearLocalAnnotations(unit);

    if (startTrap) {
        // Second pass, without annotations: if a transition opens now the
        // start is only blocked by nearby units (START_TRAP); if still
        // nothing opens, no search is possible at all (FAILURE).
        if (!openReachableTransitions()) {
            return HAAStarResult::FAILURE;
        }
        return HAAStarResult::START_TRAP;
    }
    return HAAStarResult::COMPLETE;
}
void CodeAtlas::RandomWalkClusterer::randomWalk() { srand(m_seed); int nVtx = m_vtx.size(); typedef QHash<int, double> ClusterMap; typedef QHash<int, float> EdgeMap; for (int ithIter = 0 ; ithIter < m_maxIter;) { // choose a vtx to modify randomly int curVtxID = rand()%nVtx;//randInt(nVtx); //printf("vtx %d\n", curVtxID); Vertex& curVtx= m_vtx[curVtxID]; // collect near cluster ClusterMap nearClusterWeight; for (EdgeMap::Iterator pE = curVtx.m_outEdge.begin(); pE != curVtx.m_outEdge.end(); ++pE) { int tarID = pE.key(); double w = pE.value(); if (w == 0) continue; Vertex& nearVtx = m_vtx[tarID]; if (nearClusterWeight.contains(nearVtx.m_clusterID)) { nearClusterWeight[nearVtx.m_clusterID] += w; } else { nearClusterWeight[nearVtx.m_clusterID] = w; } } if (nearClusterWeight.size() == 0) continue; // compute probability double maxWeight = 0; for (ClusterMap::Iterator pC = nearClusterWeight.begin(); pC != nearClusterWeight.end(); ++pC) { //printf("%lf ", pC.value()); maxWeight = max(maxWeight, pC.value()); } //printf("\n"); double weightSum = 0; double t = m_t; for (ClusterMap::Iterator pC = nearClusterWeight.begin(); pC != nearClusterWeight.end(); ++pC) { double w = exp((pC.value() - maxWeight)/ t); pC.value() = w; weightSum += w; } for (ClusterMap::Iterator pC = nearClusterWeight.begin(); pC != nearClusterWeight.end(); ++pC) { pC.value() /= weightSum; //printf("%lf ", pC.value()); } //printf("\n"); // choose new cluster double prob = randFloat(); double accProb = 0; int newCluster = -1; for (ClusterMap::Iterator pC = nearClusterWeight.begin(); pC != nearClusterWeight.end(); ++pC) { accProb += pC.value(); if (accProb > prob) { newCluster = pC.key(); } } // prevent round-off error if (newCluster == -1) { newCluster = (nearClusterWeight.end() - 1).key(); } //printf("old: %d new: %d\n\n", curVtx.m_clusterID, newCluster); // update graph curVtx.m_clusterID = newCluster; ++ithIter; } }