void FibHeap::Print(FibHeapNode *Tree, FibHeapNode *theParent)
{
    FibHeapNode* Temp = NULL;

    if (Tree == NULL)
        Tree = MinRoot;

    Temp = Tree;
    do {
        if (Temp->Left == NULL)
            mexPrintf( "(Left is NULL)" );
        Temp->Print();
        if (Temp->Parent != theParent)
            mexPrintf( "(Parent is incorrect)" );
        if (Temp->Right == NULL)
            mexPrintf( "(Right is NULL)" );
        else if (Temp->Right->Left != Temp)
            mexPrintf( "(Error in left link left) ->" );
        else
            mexPrintf( " <-> " );

        Temp = Temp->Right;

        /*
        if (kbhit() && getch() == 27) {
            cout << "Hit a key to resume or ESC to break\n";
            if (getch() == 27)
                break;
        }
        */
    } while (Temp != NULL && Temp != Tree);

    mexPrintf( "\n" );

    Temp = Tree;
    do {
        mexPrintf( "Children of " );
        Temp->Print();
        mexPrintf( ": " );
        if (Temp->Child == NULL)
            mexPrintf( "NONE\n" );
        else
            Print(Temp->Child, Temp);
        Temp = Temp->Right;
    } while (Temp != NULL && Temp != Tree);

    if (theParent == NULL) {
        char ch;
        mexPrintf( "\n\n\n" );
        cin >> ch;
    }
}
void FibHeap::Print(FibHeapNode *Tree, FibHeapNode *theParent)
{
    FibHeapNode* Temp = NULL;

    if (Tree == NULL)
        Tree = MinRoot;

    Temp = Tree;
    do {
        if (Temp->Left == NULL)
            cout << "(Left is NULL)";
        Temp->Print();
        if (Temp->Parent != theParent)
            cout << "(Parent is incorrect)";
        if (Temp->Right == NULL)
            cout << "(Right is NULL)";
        else if (Temp->Right->Left != Temp)
            cout << "(Error in left link left) ->";
        else
            cout << " <-> ";

        Temp = Temp->Right;

        if (kbhit() && getch() == 27) {
            cout << "Hit a key to resume or ESC to break\n";
            if (getch() == 27)
                break;
        }
    } while (Temp != NULL && Temp != Tree);

    cout << '\n';

    Temp = Tree;
    do {
        cout << "Children of ";
        Temp->Print();
        cout << ": ";
        if (Temp->Child == NULL)
            cout << "NONE\n";
        else
            Print(Temp->Child, Temp);
        Temp = Temp->Right;
    } while (Temp != NULL && Temp != Tree);

    if (theParent == NULL) {
        char ch;
        cout << "Done Printing. Hit a key.\n";
        cin >> ch;
    }
}
void FibHeap::print(FibHeapNode *Tree, FibHeapNode *them_parent)
{
    FibHeapNode* Temp = NULL;

    if (Tree == NULL)
        Tree = m_min_root;

    Temp = Tree;
    do {
        if (Temp->m_left == NULL)
            Rprintf( "(m_left is NULL)" );
        Temp->print();
        if (Temp->m_parent != them_parent)
            Rprintf( "(m_parent is incorrect)" );
        if (Temp->m_right == NULL)
            Rprintf( "(m_right is NULL)" );
        else if (Temp->m_right->m_left != Temp)
            Rprintf( "(Error in m_left link m_left) ->" );
        else
            Rprintf( " <-> " );

        Temp = Temp->m_right;

        // if (kbhit() && getch() == 27) {
        //     cout << "Hit a key to resume or ESC to break\n";
        //     if (getch() == 27) break;
        // }
    } while (Temp != NULL && Temp != Tree);

    Rprintf( "\n" );

    Temp = Tree;
    do {
        Rprintf( "m_children of " );
        Temp->print();
        Rprintf( ": " );
        if (Temp->m_child == NULL)
            Rprintf( "NONE\n" );
        else
            print(Temp->m_child, Temp);
        Temp = Temp->m_right;
    } while (Temp != NULL && Temp != Tree);

    if (them_parent == NULL) {
        // char ch;
        Rprintf( "\n\n\n" );
        // cin >> ch;
    }
}
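/*
 * The three Print variants above differ only in their output call (mexPrintf,
 * cout, Rprintf) and naming convention; all of them walk the same structure:
 * each heap node sits in a circular doubly-linked sibling list (Left/Right),
 * points up to its parent and down to one child, and carries the key used by
 * the heap operations.  The project's actual class definition is not part of
 * this listing, so the sketch below is illustrative only; the member names
 * follow the first variant, and the Set_key/Get_key/Set_vertexPosition
 * accessors used by the centrality code further down are assumed, not taken
 * from a shown header.
 */
class FibHeapNode {
public:
    FibHeapNode *Left, *Right;      // circular doubly-linked sibling list
    FibHeapNode *Parent, *Child;    // one parent pointer, one child pointer
    double Key;                     // priority (a shortest-path distance below)
    unsigned long VertexPosition;   // graph vertex this node represents

    void          Set_key(double k)                  { Key = k; }
    double        Get_key() const                    { return Key; }
    void          Set_vertexPosition(unsigned long v){ VertexPosition = v; }
    unsigned long Get_vertexPosition() const         { return VertexPosition; }
    void          Print() const;    // prints the key; implementation not shown here
};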
/*
 * Adaptive Randomized Sampling Algorithm for Weighted graphs.
 * The cut-off on the number of samples is n/20.
 */
void Adaptive_Sampling_Weighted(f64 ACB[], NETWORK *network, f64 c_thr, f64 sup, f64 &time_dif)
{
    ui64 i, j, u, v, numSample, randvx;
    ui64 nvertices = (ui64) network->nvertices;  // The number of vertices in the network
    ui64 count = 0;
    f64 u_distance, v_distance, edgeWeight;      // Variables to store distance estimates or edge weights
    time_t start, end;                           // Time variables

    vector<ui64> sigma;                   // sigma is the number of shortest paths
    vector<f64> delta;                    // A vector storing dependency of the source vertex on all other vertices
    vector< vector <ui64> > PredList;     // A list of predecessors of all vertices
    vector<ui64> SampleVertex;
    vector<ui64>::iterator it;            // An iterator of vector elements
    vector<bool> Flag;
    stack <ui64> S;                       // A stack containing vertices in the order found by Dijkstra's Algorithm

    FibHeap PQueue;                       // A priority queue storing vertices
    FibHeapNode nodeTemp;                 // A particular node stored in the priority queue
    FibHeapNode *nodePtr;                 // Pointer to a vertex element stored in the priority queue
    vector<FibHeapNode *> nodeVector;     // A vector of all priority queue elements

    // Set the start time of Randomized Brandes' Algorithm
    time(&start);

    nodeVector.assign(nvertices, NULL);
    for (i = 0; i < nvertices; i++) {
        nodeVector[i] = new FibHeapNode();
        nodeVector[i]->Set_vertexPosition(i);
        // Set all Nodes distance to unsigned long max ULONG_MAX that is assumed to be infinity
        nodeVector[i]->Set_key(ULONG_MAX);
    }

    // Generate random seed
    srand((unsigned) time(NULL));

    numSample = (ui64) (nvertices / sup);
    if (numSample < 1)
        numSample = nvertices;

    // Reserve (not resize) so the push_backs below do not leave numSample
    // spurious zero entries at the front of the sample list
    SampleVertex.reserve(numSample);
    Flag.assign(nvertices, false);

    for (i = 0; i < numSample; i++) {
        // Generate a random vertex
        randvx = (ui64) ((((f64) rand()) / ((f64) RAND_MAX + 1.0)) * nvertices);
        // Insert the randomly sampled vertex
        SampleVertex.push_back(randvx);
    }

    // Compute Randomized Betweenness Centrality using sampled vertices
    for (it = SampleVertex.begin(); it < SampleVertex.end(); it++) {
        count += 1;
        i = *it;

        /* Initialize */
        PredList.assign(nvertices, vector <ui64> (0, 0));
        sigma.assign(nvertices, 0);
        sigma[i] = 1;
        delta.assign(nvertices, 0);
        nodeVector[i]->Set_key(0);
        PQueue.Insert(nodeVector[i]);

        // While the priority queue is nonempty
        while (PQueue.GetNumNodes() != 0) {
            // Get the element in the priority queue with the minimum key
            nodePtr = PQueue.ExtractMin();
            // Get the vertex corresponding to the queue element with the minimum key
            u = nodePtr->Get_vertexPosition();
            // Push u onto the stack S. Needed later for betweenness computation
            S.push(u);
            // Shortest path distance from source i to vertex u
            u_distance = nodeVector[u]->Get_key();

            // Iterate over all the neighbors of u
            for (j = 0; j < (ui64) network->vertex[u].degree; j++) {
                // Get the neighbor v of vertex u
                v = (ui64) network->vertex[u].edge[j].target;
                // Get the weight of the edge (u,v)
                edgeWeight = (f64) network->vertex[u].edge[j].weight;

                // If v's shortest path distance estimate has not been set yet, then
                // set the distance estimate of v and store v in the priority queue
                if (nodeVector[v]->Get_key() == ULONG_MAX) {
                    nodeVector[v]->Set_key(u_distance + edgeWeight);
                    PQueue.Insert(nodeVector[v]);
                }

                // Get the current shortest path distance estimate of v
                v_distance = nodeVector[v]->Get_key();

                /* Relax and Count */
                if (v_distance == u_distance + edgeWeight) {
                    sigma[v] += sigma[u];
                    PredList[v].push_back(u);
                }
                if (v_distance > u_distance + edgeWeight) {
                    sigma[v] = sigma[u];
                    PredList[v].clear();
                    PredList[v].push_back(u);
                    nodeTemp.Set_vertexPosition(v);
                    nodeTemp.Set_key(u_distance + edgeWeight);
                    if (PQueue.DecreaseKey(nodeVector[v], nodeTemp) != 0)
                        cout << "Error decreasing the node key" << endl;
                }
            } // End For
        } // End While

        /* Accumulation */
        while (!S.empty()) {
            u = S.top();
            S.pop();
            for (j = 0; j < PredList[u].size(); j++) {
                delta[PredList[u][j]] += ((f64) sigma[PredList[u][j]] / sigma[u]) * (1 + delta[u]);
            }
            if ((u != i) && (!Flag[u])) {
                ACB[u] += delta[u];
                if (ACB[u] > c_thr * nvertices) {
                    ACB[u] = nvertices * (ACB[u] / count);
                    Flag[u] = true;
                }
            } // End If
        } // End While

        // Clear data for the next run
        PredList.clear();
        sigma.clear();
        delta.clear();
        for (j = 0; j < nvertices; j++)
            nodeVector[j]->Set_key(ULONG_MAX);
    } // End For

    for (i = 0; i < nvertices; i++) {
        if (!Flag[i]) {
            ACB[i] = nvertices * (ACB[i] / numSample);
        }
    }

    // End time after Brandes' algorithm and the time difference
    time(&end);
    time_dif = difftime(end, start);
    cout << "It took " << time_dif << " seconds to calculate Adaptive Sampling Based Approximate Centrality Values in a weighted graph" << endl;

    // Deallocate memory
    for (i = 0; i < nvertices; i++)
        delete nodeVector[i];

    return;
} // End of Adaptive_Sampling_Weighted
/*
 * Brandes Algorithm for weighted graphs
 */
void BrandesAlgorithm_Weighted(f64 CB[], NETWORK *network, f64 &time_dif)
{
    ui64 i, j, u, v;
    ui64 nvertices = (ui64) network->nvertices;  // The number of vertices in the network
    f64 u_distance, v_distance, edgeWeight;      // Variables to store distance estimates or edge weights
    time_t start, end;                           // Time variables

    vector<ui64> sigma;                   // sigma is the number of shortest paths
    vector<f64> delta;                    // A vector storing dependency of the source vertex on all other vertices
    vector< vector <ui64> > PredList;     // A list of predecessors of all vertices
    stack <ui64> S;                       // A stack containing vertices in the order found by Dijkstra's Algorithm

    FibHeap PQueue;                       // A priority queue storing vertices
    FibHeapNode nodeTemp;                 // A particular node stored in the priority queue
    FibHeapNode *nodePtr;                 // Pointer to a vertex element stored in the priority queue
    vector<FibHeapNode *> nodeVector;     // A vector of all priority queue elements

    // Set the start time of Brandes' Algorithm
    time(&start);

    nodeVector.assign(nvertices, NULL);
    for (i = 0; i < nvertices; i++) {
        nodeVector[i] = new FibHeapNode();
        nodeVector[i]->Set_vertexPosition(i);
        // Set all Nodes distance to unsigned long max ULONG_MAX that is assumed to be infinity
        nodeVector[i]->Set_key(ULONG_MAX);
    }

    // Compute Betweenness Centrality for every vertex i
    for (i = 0; i < nvertices; i++) {
        /* Initialize */
        PredList.assign(nvertices, vector <ui64> (0, 0));
        sigma.assign(nvertices, 0);
        sigma[i] = 1;
        delta.assign(nvertices, 0);
        nodeVector[i]->Set_key(0);
        PQueue.Insert(nodeVector[i]);

        // While the priority queue is nonempty
        while (PQueue.GetNumNodes() != 0) {
            // Get the element in the priority queue with the minimum key
            nodePtr = PQueue.ExtractMin();
            // Get the vertex corresponding to the queue element with the minimum key
            u = nodePtr->Get_vertexPosition();
            // Push u onto the stack S. Needed later for betweenness computation
            S.push(u);
            // Shortest path distance from source i to vertex u
            u_distance = nodeVector[u]->Get_key();

            // Iterate over all the neighbors of u
            for (j = 0; j < (ui64) network->vertex[u].degree; j++) {
                // Get the neighbor v of vertex u
                v = (ui64) network->vertex[u].edge[j].target;
                // Get the weight of the edge (u,v)
                edgeWeight = (f64) network->vertex[u].edge[j].weight;

                // If v's shortest path distance estimate has not been set yet, then
                // set the distance estimate of v and store v in the priority queue
                if (nodeVector[v]->Get_key() == ULONG_MAX) {
                    nodeVector[v]->Set_key(u_distance + edgeWeight);
                    PQueue.Insert(nodeVector[v]);
                }

                // Get the current shortest path distance estimate of v
                v_distance = nodeVector[v]->Get_key();

                /* Relax and Count */
                if (v_distance == u_distance + edgeWeight) {
                    sigma[v] += sigma[u];
                    PredList[v].push_back(u);
                }
                if (v_distance > u_distance + edgeWeight) {
                    sigma[v] = sigma[u];
                    PredList[v].clear();
                    PredList[v].push_back(u);
                    nodeTemp.Set_vertexPosition(v);
                    nodeTemp.Set_key(u_distance + edgeWeight);
                    if (PQueue.DecreaseKey(nodeVector[v], nodeTemp) != 0)
                        cout << "Error decreasing the node key" << endl;
                }
            } // End For
        } // End While

        /* Accumulation */
        while (!S.empty()) {
            u = S.top();
            S.pop();
            for (j = 0; j < PredList[u].size(); j++) {
                delta[PredList[u][j]] += ((f64) sigma[PredList[u][j]] / sigma[u]) * (1 + delta[u]);
            }
            if (u != i)
                CB[u] += delta[u];
        }

        // Clear data for the next run
        PredList.clear();
        sigma.clear();
        delta.clear();
        for (j = 0; j < nvertices; j++)
            nodeVector[j]->Set_key(ULONG_MAX);
    } // End For

    // End time after Brandes' algorithm and the time difference
    time(&end);
    time_dif = difftime(end, start);
    cout << "It took " << time_dif << " seconds to calculate Betweenness Centrality in a weighted graph" << endl;

    // Deallocate memory
    for (i = 0; i < nvertices; i++)
        delete nodeVector[i];

    return;
} // End of BrandesAlgorithm_Weighted
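/*
 * A minimal calling sketch for the two weighted routines above, included only
 * to show how they are meant to be driven.  How NETWORK is populated is not
 * part of this listing, so the function below simply takes an already-loaded
 * network; f64/ui64 are assumed to be typedefs for double and a 64-bit
 * unsigned integer, and the c_thr/sup constants passed here are placeholder
 * values (sup = 20 matching the "n/20" cut-off mentioned in the header
 * comment).  The centrality arrays must be zero-initialized by the caller,
 * since both routines only accumulate into them.
 */
#include <vector>

void run_weighted_centrality(NETWORK *network)
{
    ui64 n = (ui64) network->nvertices;
    std::vector<f64> CB(n, 0.0);     // exact betweenness values
    std::vector<f64> ACB(n, 0.0);    // adaptive-sampling approximation
    f64 t_exact = 0.0, t_approx = 0.0;

    // Exact Brandes on the weighted graph
    BrandesAlgorithm_Weighted(CB.data(), network, t_exact);

    // Approximation: stop scaling a vertex once its accumulated dependency
    // exceeds c_thr * n, sampling roughly nvertices / sup source vertices
    Adaptive_Sampling_Weighted(ACB.data(), network, /*c_thr=*/5.0, /*sup=*/20.0, t_approx);
}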
// Variation of Dijkstra which returns the previous-vertex array
vector<int> dijkstra(Graph *g, int srcNode, int destNode)
{
    fHeap = new FibonacciHeap();   // create heap object (fHeap is a file-scope pointer)
    FibHeapNode *fnode;
    string path;                   // used for printing later

    vector<int> distFromSource(g->numVertices);  // array to hold dist of all vertices from the source
    vector<int> prev(g->numVertices);            // stores previous vertex to every vertex in shortest path from source to said vertex
    map<int, FibHeapNode*> FheapNodeList;        // store FibHeapNode pointers with vertex as key to be used in decrease key later

    distFromSource[srcNode] = 0;

    map<int, int> srcAdjacencyList;              // adjacency list of source
    map<int, int>::iterator innerIt;
    for (innerIt = g->vertices[srcNode].begin(); innerIt != g->vertices[srcNode].end(); innerIt++) {
        srcAdjacencyList[innerIt->first] = innerIt->second;   // populate src adjacency list
    }

    map<int, map<int, int> >::iterator it;
    for (it = g->vertices.begin(); it != g->vertices.end(); it++) {
        if (it->first != srcNode) {
            if (srcAdjacencyList.find(it->first) != srcAdjacencyList.end()) {
                // belongs to adjacency list
                distFromSource[it->first] = srcAdjacencyList[it->first];  // dist is edge weight initially
                prev[it->first] = srcNode;                                // prev is source initially
            } else {
                // for all other nodes: 2000 acts as a pseudo-infinity
                distFromSource[it->first] = 2000;
                prev[it->first] = -1;   // -1 marks "no predecessor yet" (NULL here is just 0 and collides with vertex 0)
            }
        }
        fnode = fHeap->insert(it->first, distFromSource[it->first]);          // put node in heap
        FheapNodeList.insert(pair<int, FibHeapNode *>(it->first, fnode));     // add pointer to map
    }

    while (!fHeap->empty()) {
        FibHeapNode *min = fHeap->minimum();
        int minKey = min->key();
        int minKeyLabel = min->data();
        fHeap->removeMin();

        it = g->vertices.find(minKeyLabel);
        for (innerIt = (it->second).begin(); innerIt != (it->second).end(); innerIt++) {
            int alt = distFromSource[minKeyLabel] + innerIt->second;
            if (alt < distFromSource[innerIt->first]) {
                distFromSource[innerIt->first] = alt;
                prev[innerIt->first] = minKeyLabel;
                // retrieve node pointer from map and pass to decrease key
                fHeap->decreaseKey((FheapNodeList.find(innerIt->first))->second, alt);
            }
        }
    }

    // destNodeLabel and srcNodeLabel are presumably file-scope globals describing the query being run
    if (destNode == destNodeLabel && srcNode == srcNodeLabel) {
        cout << distFromSource[destNodeLabel] << "\n";
    }

    return prev;
}
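/*
 * Sketch of how the prev array returned by dijkstra() above could be used to
 * recover the actual vertex sequence of a shortest path.  This helper is not
 * part of the original program; it only walks the predecessor links backwards
 * from the destination.  It assumes the -1 "no predecessor" convention used in
 * the initialization above, so an unreachable destination yields an empty path.
 */
#include <algorithm>
#include <vector>

std::vector<int> extract_path(const std::vector<int> &prev, int srcNode, int destNode)
{
    std::vector<int> path;
    int v = destNode;

    // Follow predecessor links back towards the source
    while (v != srcNode) {
        if (v < 0 || v >= (int) prev.size())
            return std::vector<int>();   // destination not reachable from the source
        path.push_back(v);
        v = prev[v];
    }
    path.push_back(srcNode);

    // prev was filled from the source outwards, so reverse to get src -> dest order
    std::reverse(path.begin(), path.end());
    return path;
}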