// Process a block of nodes bool DataScaling::saveLevel(int level, ofstream& nodeFile, ofstream& edgeFile, ofstream& adFile) { // Get this level's list of nodes NodeList* nodeList = this->nodes[level]; // Find and create edges between nodes on the same level this->makeSiblingEdges(nodeList, level); // Flush this block from memory for (unsigned int i = 0; i < nodeList->size(); ++i) { Node* node = nodeList->at(i); // Output node to the nodeFile node->print(nodeFile); // Output a matching ad to the adFile // "Known truth" allows us to measure quality of the system node->print(adFile); // Output this node's edges to the edgeFile node->printEdges(edgeFile); // Deallocate the node delete node; } // Empty the node list nodeList->clear(); return(true); }
// Reset the flow state: rebuild the network and height labels, point every
// vertex's current-arc index at its last outgoing edge, clear the active
// list, then push from the source over each of its arcs in turn.
// NOTE(review): looks like push-relabel initialisation — confirm against the
// surrounding solver.
void initFlow()
{
    initNet();
    initHeight();

    int i = v_sz;
    while (i-- > 0)
        cur[i] = net[i].size() - 1;

    lst.clear();

    // Same order as the original for-loop: test, push, then decrement.
    while (cur[s] >= 0) {
        push(s);
        --cur[s];
    }
}
// Evaluate `path` starting from `node` and collect every matching DOM node
// into `result`; any previous contents of `result` are discarded.
void findAll(const Path& path, const dom::NodePtr node, NodeList& result)
{
    const detail::LocationStepList& steps = path.getStepList().steps;
    detail::Context context(node.get());

    result.clear();

    // Run the step machinery into a temporary raw-pointer vector, then
    // convert each hit back into a shared handle for the caller.
    detail::NodePtrVec matches;
    findNodes(context, steps.begin(), steps.end(), matches, false);

    detail::NodePtrVec::const_iterator cur = matches.begin();
    const detail::NodePtrVec::const_iterator last = matches.end();
    for (; cur != last; ++cur) {
        result.push_back((*cur)->self().lock());
    }
}
// Distributed level-synchronous BFS from global vertex `root`.
// Each rank owns a contiguous block of vertices starting at `first_vertex`;
// on return, distance[i] is the BFS level of local vertex i, or `infinity`
// if unreachable. Uses Boost.MPI collectives to exchange frontiers.
void ParallelBFS::calculate(NodeId root) {
    // Mark every local vertex unreached.
    distance.assign((size_t) vertex_count, infinity);
    NodeId level = 1;
    NodeList frontier;
    frontier.reserve((size_t)vertex_count);
    // Only the owner of `root` seeds its local frontier (local index = global - first_vertex).
    if (comm.rank() == find_owner(root)) {
        frontier.push_back(root - first_vertex);
        distance[root - first_vertex] = 0;
    }
    // send_buf[r] accumulates the global neighbour ids destined for rank r.
    std::vector<NodeList> send_buf((size_t)comm.size());
    NodeList new_frontier;
    NodeList sizes((size_t)comm.size()), displacements((size_t)comm.size());
    // Iterate while any rank still has frontier vertices (global sum > 0).
    while (mpi::all_reduce(comm, (NodeId)frontier.size(), std::plus<NodeId>()) > 0) {
        // Expand the local frontier: route each neighbour to its owning rank.
        for (NodeId u : frontier)
            for (int e = vertices[u]; e < vertices[u + 1]; ++e) {
                int v = edges[e];
                send_buf[find_owner(v)].push_back(v);
            }
        // One gather round per destination rank i: rank i learns every
        // per-rank contribution size, then gathers the actual vertices.
        for (int i = 0; i < comm.size(); ++i) {
            mpi::gather(comm, (NodeId)send_buf[i].size(), sizes.data(), i);
            if (i == comm.rank()) {
                // Prefix-sum the sizes into displacements (displacements[0]
                // stays 0 from value-initialisation) and size the receive buffer.
                for (int j = 1; j < comm.size(); ++j)
                    displacements[j] = displacements[j - 1] + sizes[j - 1];
                new_frontier.resize(
                    (size_t)(displacements[comm.size()-1] + sizes[comm.size() - 1]));
                mpi::gatherv(comm, send_buf[i], new_frontier, sizes, displacements, i);
            } else {
                // Non-root side of the same gatherv collective.
                mpi::gatherv(comm, send_buf[i], i);
            }
        }
        // Reset the outgoing buffers for the next level.
        for (size_t i = 0; i < comm.size(); ++i) send_buf[i].clear();
        frontier.clear();
        // Keep only first-time-seen local vertices; they form the next frontier.
        for (int v : new_frontier) {
            v -= first_vertex;
            if (distance[v] == infinity) {
                distance[v] = level;
                frontier.push_back(v);
            }
        }
        ++level;
    }
}
// Collect this node and its descendants, breadth-first, into `outlist`.
// `count` limits how many nodes are gathered (0 means "no limit");
// when `classTypefilter` is non-empty, only nodes whose class name is in the
// set are collected — but traversal still descends through rejected nodes.
void sgNode::getInheritsNodes( NodeList &outlist, size_t count /*= 0*/, const StringHandleSet &classTypefilter /*= StringHandleSet()*/ )
{
    outlist.clear();

    // count == 0 means unlimited; otherwise pre-size the output.
    if (count > 0)
        outlist.reserve(count);
    else
        count = size_t(-1) - 1;

    const bool doFilter = !classTypefilter.empty();

    size_t collected = 0;
    if (collected == count)
        return;

    // Breadth-first queue, seeded with this node itself.
    sg_list(sgNode*) pending;
    pending.push_back(this);

    while (collected < count && !pending.empty())
    {
        sgNode *current = pending.front();
        pending.pop_front();

        // Accept the node when no filter is active, or its class name matches.
        const bool accept = !doFilter
            || classTypefilter.find(current->GetMyClassName()) != classTypefilter.end();
        if (accept)
        {
            outlist.push_back(current);
            ++collected;
        }

        // Children are always enqueued, even when `current` was filtered out.
        ChildNodeMap &children = current->m_Children;
        for (ChildNodeMap::const_iterator ci = children.begin(); ci != children.end(); ++ci)
        {
            pending.push_back(ci->second);
        }
    }
}
// Delete every node of every tree rooted in `roots`.
//
// Fixes over the previous version:
//  - Interior nodes were popped from the work list without being deleted,
//    so only the leaves reachable at visit time were ever freed — every
//    non-leaf node leaked. Nodes are now deleted unconditionally.
//  - A root that was itself a leaf dereferenced a possibly-null `parent`;
//    the parent link (and its child pointer) is now checked before use.
//  - `parentNode->right->ID` could dereference a null `right`; both child
//    pointers are null-checked before the ID comparison.
void emptyTree(NodeSet roots)
{
    NodeList nodeList;
    NodeSetIter setIter;

    for (setIter = roots.begin(); setIter != roots.end(); ++setIter)
    {
        TreeNode *root = *setIter;
        if (root == 0)
            continue;

        // Detach the root from its parent (if any) so the parent is not left
        // pointing at freed memory.
        TreeNode *parentNode = root->parent;
        if (parentNode != 0)
        {
            if (parentNode->right != 0 && parentNode->right->ID == root->ID)
                parentNode->right = 0;
            else if (parentNode->left != 0 && parentNode->left->ID == root->ID)
                parentNode->left = 0;
        }

        // Iterative pre-order teardown: enqueue children, then delete the
        // node itself — interior nodes and leaves alike.
        nodeList.push_front(root);
        while (nodeList.size() != 0)
        {
            TreeNode *tempNode = *nodeList.begin();
            nodeList.pop_front();

            if (tempNode->right != 0)
                nodeList.push_front(tempNode->right);
            if (tempNode->left != 0)
                nodeList.push_front(tempNode->left);

            delete tempNode;
        }

        nodeList.clear();
    }
}
// Compute a degeneracy ordering of _g into `order` and return the graph's
// degeneracy: vertices are removed one at a time, always taking one of
// minimum remaining degree; the degeneracy is the largest degree seen at
// removal time. Bucket list T[d] holds the still-present vertices whose
// current degree is d (Eppstein et al., 2010).
size_t BronKerbosch::computeDegeneracy(NodeList& order)
{
  // Requires O(|V| + |E|) time
  order.clear();

  typedef typename Graph::template NodeMap<size_t> DegNodeMap;
  typedef typename Graph::template NodeMap<typename NodeList::iterator> NodeListItMap;

  // present[v]: v has not yet been removed from the (virtual) graph.
  BoolNodeMap present(_g, true);
  // deg[v]: v's degree counting only still-present neighbours.
  DegNodeMap deg(_g, 0);
  size_t maxDeg = 0;
  // it[v]: v's position inside its current bucket list, for O(1) splice.
  NodeListItMap it(_g);

  // compute node degrees, O(|E|) time
  for (NodeIt v(_g); v != lemon::INVALID; ++v)
  {
    size_t d = 0;
    for (IncEdgeIt e(_g, v); e != lemon::INVALID; ++e, ++d);
    deg[v] = d;
    if (d > maxDeg) maxDeg = d;
  }

  // fill T, O(d) time
  NodeListVector T(maxDeg + 1, NodeList());
  for (NodeIt v(_g); v != lemon::INVALID; ++v)
  {
    size_t d = deg[v];
    T[d].push_front(v);
    it[v] = T[d].begin();
  }

  size_t degeneracy = 0;

  // O(|V|) time, Eppstein et al. (2010)
  const size_t n = T.size();
  size_t i = 0;
  while (i < n)
  {
    NodeList& l = T[i];
    if (T[i].size() > 0)
    {
      // Remove a minimum-degree vertex and append it to the ordering.
      Node v = l.front();
      l.pop_front();
      order.push_back(v);
      present[v] = false;
      if (deg[v] > degeneracy)
      {
        degeneracy = deg[v];
      }
      //std::cout << "Removed " << _g.id(v) << std::endl;
      // Each still-present neighbour loses one degree and moves down a bucket;
      // the saved iterator makes the splice O(1).
      for (IncEdgeIt e(_g, v); e != lemon::INVALID; ++e)
      {
        Node w = _g.oppositeNode(v, e);
        if (present[w])
        {
          size_t deg_w = deg[w];
          typename NodeList::iterator it_w = it[w];
          T[deg_w - 1].splice(T[deg_w - 1].begin(), T[deg_w], it_w);
          deg[w]--;
        }
      }
      // NOTE(review): restarting the bucket scan at 0 is correct but
      // conservative — neighbours only drop to bucket i-1, so `i = (i > 0 ? i-1 : 0)`
      // would suffice; confirm before changing.
      i = 0;
    }
    else
    {
      ++i;
    }
  }
  //std::cerr << "Degeneracy: " << degeneracy << std::endl;
  return degeneracy;
}
// Discard every queued entry, leaving the priority list empty.
void pqClear()
{
    pri_List.clear();
}