Example #1
/* 
 * Adaptive randomized sampling algorithm for weighted graphs. The number of 
 * sampled source vertices is n/sup, and a vertex's running centrality estimate 
 * is frozen once its accumulated dependency exceeds c_thr * n. 
 */ 
void Adaptive_Sampling_Weighted(f64 ACB[], NETWORK *network, f64 c_thr, f64 sup, f64 &time_dif) { 
	
	ui64 i, j, u, v, numSample, randvx;
	ui64 nvertices = (ui64) network->nvertices;	// The number of vertices in the network
	ui64 count = 0; 
	f64 u_distance, v_distance, edgeWeight;		// Variables to store distance estimates or edge weights
	
	time_t start, end;							// Time variables
	
	vector<ui64> sigma;							// sigma is the number of shortest paths
	vector<f64> delta;							// A vector storing dependency of the source vertex on all other vertices
	vector< vector <ui64> > PredList;			// A list of predecessors of all vertices 
	
	vector<ui64> SampleVertex; 
	vector<ui64>::iterator it;					// An iterator of vector elements
	vector<bool> Flag; 
	
	stack <ui64> S;								// A stack containing vertices in the order found by Dijkstra's Algorithm
	
	FibHeap PQueue;								// A priority queue storing vertices
	FibHeapNode nodeTemp;						// A particular node stored in the priority queue
	FibHeapNode *nodePtr;						// Pointer to a vertex element stored in the priority queue
	vector<FibHeapNode *> nodeVector;			// A vector of all priority queue elements 
	
	// Set the start time of Randomized Brandes' Algorithm
	time(&start); 

	nodeVector.assign ( nvertices, NULL );
	for (i=0; i < nvertices; i++) {
		nodeVector[i] = new FibHeapNode(); 
		nodeVector[i]->Set_vertexPosition(i); 
		// Initialize every node's distance key to ULONG_MAX, which stands in for infinity
		nodeVector[i]->Set_key(ULONG_MAX); 
	}
	
	// Generate random seed 
	srand((unsigned)time(NULL)); 
	
	numSample = (ui64) (nvertices/sup); 
	
	if (numSample < 1) 
		numSample = nvertices; 
	
	// Reserve capacity only; resize() here would leave numSample zero-valued entries
	// in front of the samples appended below with push_back
	SampleVertex.reserve(numSample); 
	Flag.assign(nvertices, false); 
	
	for (i=0; i < numSample; i++) {
		
		// Generate a random vertex
		randvx = (ui64) ((((f64) rand())/((f64) RAND_MAX + 1.0))*nvertices); 
		
		// Insert the randomly sampled vertex
		SampleVertex.push_back(randvx); 
	}
	
	// Compute Randomized Betweenness Centrality using sampled vertices
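	// Each sampled vertex serves as the source of one Dijkstra pass below. Sampling
	// is uniform and with replacement, so a vertex may be drawn more than once.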
	for (it= SampleVertex.begin(); it < SampleVertex.end(); it++) {
		
		count += 1;
		i = *it; 
		
		/* Initialize */ 
		PredList.assign(nvertices, vector<ui64>()); 
		sigma.assign(nvertices, 0); 
		sigma[i] = 1; 
		delta.assign(nvertices, 0); 
		
		nodeVector[i]->Set_key(0); 
		PQueue.Insert(nodeVector[i]); 
		
		// While the priority queue is nonempty 
		while (PQueue.GetNumNodes() != 0) {
			// Get the element in the priority queue with the minimum key 
			nodePtr = PQueue.ExtractMin(); 
			// Get the vertex corresponding to the queue element with the minimum key
			u = nodePtr->Get_vertexPosition(); 
			// Push u onto the stack S. Needed later for betweenness computation
			S.push(u); 
			// Shortest path distance from source i to vertex u
			u_distance = nodeVector[u]->Get_key(); 
			// Iterate over all the neighbors of u 
			for (j=0; j < (ui64) network->vertex[u].degree; j++) { 
				// Get the neighbor v of vertex u
				v = (ui64) network->vertex[u].edge[j].target; 
				// Get the weight of the edge (u,v) 
				edgeWeight = (f64) network->vertex[u].edge[j].weight; 
				// If v's shortest path distance estimate has not been set yet, then 
				// set the distance estimate of v and store v in the priority queue
				if (nodeVector[v]->Get_key() == ULONG_MAX) {
					nodeVector[v]->Set_key(u_distance + edgeWeight); 
					PQueue.Insert(nodeVector[v]); 
				}
				// Get the current shortest path distance estimate of v
				v_distance = nodeVector[v]->Get_key(); 
				
				/* Relax and Count */ 
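				// Two cases: if the tentative distance through u equals v's current
				// estimate, u lies on another shortest path to v, so add u's path
				// count and record u as a predecessor. If it is strictly smaller,
				// a shorter route via u has been found, so reset v's path count and
				// predecessor list and decrease v's key in the priority queue.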
				if (v_distance == u_distance + edgeWeight) { 
					sigma[v] += sigma[u]; 
					PredList[v].push_back(u); 
				}
				if (v_distance > u_distance + edgeWeight) {
					sigma[v] = sigma[u]; 
					PredList[v].clear(); 
					PredList[v].push_back(u); 
					nodeTemp.Set_vertexPosition(v); 
					nodeTemp.Set_key(u_distance + edgeWeight); 
					if (PQueue.DecreaseKey(nodeVector[v], nodeTemp) != 0) 
						cout << "Error decreasing the node key" << endl; 
				}
				
			} // End For 
			
		} // End While
		
		
		/* Accumulation */ 
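		// Brandes' dependency accumulation: vertices are popped from S in
		// non-increasing distance from the source i, so delta[u] is final when
		// u is processed, and each predecessor p of u receives
		//     delta[p] += (sigma[p] / sigma[u]) * (1 + delta[u])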
		while (!S.empty()) { 
			u = S.top(); 
			S.pop(); 
			for (j=0; j < PredList[u].size(); j++) {
				delta[PredList[u][j]] += ((f64) sigma[PredList[u][j]]/sigma[u]) * (1+delta[u]); 
			}
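			// Adaptive cut-off: u's dependency keeps accumulating until it exceeds
			// c_thr * n, at which point its centrality estimate is frozen and
			// extrapolated from the 'count' source samples processed so far.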
			
			if ((u != i) && (!Flag[u])) {
				ACB[u] += delta[u]; 
				if (ACB[u] > c_thr * nvertices) {
					ACB[u] = nvertices * (ACB[u]/count);
					Flag[u] = true;
				}
			} // End If 
			
		} // End While
		
		// Clear data for the next run
		PredList.clear(); 
		sigma.clear(); 
		delta.clear(); 
		for (j=0; j < nvertices; j++) 
			nodeVector[j]->Set_key(ULONG_MAX); 
		
	} // End For 
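	// Vertices that never reached the cut-off are extrapolated from the full
	// sample: scale their accumulated dependency by n/numSample.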
	
	for (i=0; i < nvertices; i++) {
		if (!Flag[i]) {
			ACB[i] = nvertices * (ACB[i]/numSample); 
		}
	}
	
	// Record the end time and compute the elapsed time
	time(&end); 
	time_dif = difftime(end, start); 
	cout << "It took " << time_dif << " seconds to calculate Adaptive Sampling Based Approximate Centrality Values in a weighted graph" << endl;
	
	// Deallocate memory 
	for (i=0; i < nvertices; i++) 
		delete nodeVector[i]; 
	return; 
	
} // End of Adaptive_Sampling_Weighted 
Example #2
/* 
 * Brandes' Algorithm for weighted graphs. 
 */ 
void BrandesAlgorithm_Weighted(f64 CB[], NETWORK *network, f64 &time_dif) { 
	
	ui64 i, j, u, v;
	ui64 nvertices = (ui64) network->nvertices;	// The number of vertices in the network
	f64 u_distance, v_distance, edgeWeight;		// Variables to store distance estimates or edge weights
	
	time_t start, end;							// Time variables
	
	vector<ui64> sigma;							// sigma is the number of shortest paths
	vector<f64> delta;							// A vector storing dependency of the source vertex on all other vertices
	vector< vector <ui64> > PredList;			// A list of predecessors of all vertices 
	
	stack <ui64> S;								// A stack containing vertices in the order found by Dijkstra's Algorithm
	
	FibHeap PQueue;								// A priority queue storing vertices
	FibHeapNode nodeTemp;						// A particular node stored in the priority queue
	FibHeapNode *nodePtr;						// Pointer to a vertex element stored in the priority queue
	vector<FibHeapNode *> nodeVector;			// A vector of all priority queue elements 
	
	// Set the start time of Brandes' Algorithm
	time(&start); 

	nodeVector.assign ( nvertices, NULL );
	for (i=0; i < nvertices; i++) {
		nodeVector[i] = new FibHeapNode(); 
		nodeVector[i]->Set_vertexPosition(i); 
		// Initialize every node's distance key to ULONG_MAX, which stands in for infinity
		nodeVector[i]->Set_key(ULONG_MAX); 
	}
	
	// Compute Betweenness Centrality for every vertex i
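	// One Dijkstra pass plus one accumulation pass per source vertex; with a
	// Fibonacci heap this gives Brandes' O(nm + n^2 log n) bound for weighted graphs.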
	for (i=0; i < nvertices; i++) {
		
		/* Initialize */ 
		PredList.assign(nvertices, vector<ui64>()); 
		sigma.assign(nvertices, 0); 
		sigma[i] = 1; 
		delta.assign(nvertices, 0); 
		
		nodeVector[i]->Set_key(0); 
		PQueue.Insert(nodeVector[i]); 
		
		// While the priority queue is nonempty 
		while (PQueue.GetNumNodes() != 0) {
			// Get the element in the priority queue with the minimum key 
			nodePtr = PQueue.ExtractMin(); 
			// Get the vertex corresponding to the queue element with the minimum key
			u = nodePtr->Get_vertexPosition(); 
			// Push u onto the stack S. Needed later for betweenness computation
			S.push(u); 
			// Shortest path distance from source i to vertex u
			u_distance = nodeVector[u]->Get_key(); 
			// Iterate over all the neighbors of u 
			for (j=0; j < (ui64) network->vertex[u].degree; j++) { 
				// Get the neighbor v of vertex u
				v = (ui64) network->vertex[u].edge[j].target; 
				// Get the weight of the edge (u,v) 
				edgeWeight = (f64) network->vertex[u].edge[j].weight; 
				// If v's shortest path distance estimate has not been set yet, then 
				// set the distance estimate of v and store v in the priority queue
				if (nodeVector[v]->Get_key() == ULONG_MAX) {
					nodeVector[v]->Set_key(u_distance + edgeWeight); 
					PQueue.Insert(nodeVector[v]); 
				}
				// Get the current shortest path distance estimate of v
				v_distance = nodeVector[v]->Get_key(); 
				
				/* Relax and Count */ 
				if (v_distance == u_distance + edgeWeight) { 
					sigma[v] += sigma[u]; 
					PredList[v].push_back(u); 
				}
				if (v_distance > u_distance + edgeWeight) {
					sigma[v] = sigma[u]; 
					PredList[v].clear(); 
					PredList[v].push_back(u); 
					nodeTemp.Set_vertexPosition(v); 
					nodeTemp.Set_key(u_distance + edgeWeight); 
					if (PQueue.DecreaseKey(nodeVector[v], nodeTemp) != 0) 
						cout << "Error decreasing the node key" << endl; 
				}
				
			} // End For 
			
		} // End While
					

		/* Accumulation */ 
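		// Every vertex serves as a source, so the dependencies summed into CB[u]
		// accumulate to the exact betweenness values; no sampling extrapolation
		// is needed here.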
		while (!S.empty()) { 
			u = S.top(); 
			S.pop(); 
			for (j=0; j < PredList[u].size(); j++) {
				delta[PredList[u][j]] += ((f64) sigma[PredList[u][j]]/sigma[u]) * (1+delta[u]); 
			}
			if (u != i) 
				CB[u] += delta[u]; 
		}
		
		// Clear data for the next run
		PredList.clear(); 
		sigma.clear(); 
		delta.clear(); 
		for (j=0; j < nvertices; j++) 
			nodeVector[j]->Set_key(ULONG_MAX); 
		
	} // End For 
	
	// Record the end time and compute the elapsed time
	time(&end); 
	time_dif = difftime(end, start); 
	cout << "It took " << time_dif << " seconds to calculate Betweenness Centrality in a weighted graph" << endl; 
	
	// Deallocate memory 
	for (i=0; i < nvertices; i++) 
		delete nodeVector[i]; 
	return; 
	
} // End of BrandesAlgorithm_Weighted
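
Usage sketch (added for illustration): a minimal driver showing how the two routines above might be called side by side. It assumes the f64/ui64 typedefs, the NETWORK struct, and a read_network() GML loader from the rest of this codebase; the input file name and the c_thr/sup values are placeholders, not values fixed by the original code.

#include <cstdio>
#include <iostream>
// The headers below are assumed from this codebase (names may differ):
// "network.h" for NETWORK/f64/ui64 and "readgml.h" for read_network().
using namespace std;

int main() {
	NETWORK network;							// Graph structure filled in by the loader
	f64 time_exact = 0.0, time_approx = 0.0;	// Elapsed times reported by the two routines

	FILE *fp = fopen("graph.gml", "r");			// Placeholder input file name
	if (fp == NULL) return 1;
	read_network(&network, fp);					// Assumed GML loader from this codebase
	fclose(fp);

	ui64 n = (ui64) network.nvertices;
	f64 *CB  = new f64[n]();					// Exact betweenness, zero-initialized
	f64 *ACB = new f64[n]();					// Approximate betweenness, zero-initialized

	BrandesAlgorithm_Weighted(CB, &network, time_exact);
	Adaptive_Sampling_Weighted(ACB, &network, /*c_thr=*/5.0, /*sup=*/20.0, time_approx);

	// Print the first few vertices to compare exact and approximate scores
	for (ui64 i = 0; i < n && i < 10; i++)
		cout << i << "\t" << CB[i] << "\t" << ACB[i] << endl;

	delete [] CB;
	delete [] ACB;
	return 0;
}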