int erase_links(deque<set<int> > & E, const deque<deque<int> > & member_list, const bool excess, const bool defect, const double mixing_parameter) {
	
	// Adjusts the adjacency sets E in place so that every node's internal-degree
	// fraction  internal_kin(i)/degree(i)  is compatible with the requested
	// mixing parameter.
	//   E                : E[i] is the set of neighbours of node i (modified in place)
	//   member_list      : member_list[i] = communities node i belongs to
	//   excess           : -sup option -- remove external links from nodes whose
	//                      internal fraction is below 1 - mixing_parameter
	//   defect           : -inf option -- add external links to nodes whose
	//                      internal fraction is above 1 - mixing_parameter
	//   mixing_parameter : target fraction of external links per node
	// Returns 0 on success, -1 if some node cannot be made to satisfy the constraint.
	
	int num_nodes= member_list.size();
	
	int eras_add_times=0;	// number of links erased/added so far (for the log message)
	
	if (excess) {
		
		for (int i=0; i<num_nodes; i++) {
			
			// While node i keeps more than one link but its internal fraction
			// is still too low, delete one random external link.
			while ( (E[i].size()>1) &&  double(internal_kin(E, member_list, i))/E[i].size() < 1 - mixing_parameter) {
                
                //---------------------------------------------------------------------------------
				
				
				cout<<"degree sequence changed to respect the option -sup ... "<<++eras_add_times<<endl;
				
				// Collect the external neighbours of i (those sharing no community with i).
				deque<int> deqar;
				for (set<int>::iterator it_est=E[i].begin(); it_est!=E[i].end(); it_est++)
					if (!they_are_mate(i, *it_est, member_list))
						deqar.push_back(*it_est);
				
				
				if(deqar.size()==E[i].size()) {	// this shouldn't happen...
                    
					// Every link of i is external: erasing links can never raise the
					// internal fraction, so the constraint is unsatisfiable.
					cerr<<"sorry, something went wrong: there is a node which does not respect the constraints. (option -sup)"<<endl;
					return -1;
                    
				}
				
				// Remove one uniformly chosen external link, in both directions.
				int random_mate=deqar[irand(deqar.size()-1)];
				
				E[i].erase(random_mate);
				E[random_mate].erase(i);
				
                
			}
		}
        
	}
	
	
	
	if (defect) {
        
		for (int i=0; i<num_nodes; i++)
			// While node i is not saturated but its internal fraction is too
			// high, attach one random external link.
			while ( (E[i].size()<E.size()) &&  double(internal_kin(E, member_list, i))/E[i].size() > 1 - mixing_parameter) {
				
				//---------------------------------------------------------------------------------
                
				
				cout<<"degree sequence changed to respect the option -inf ... "<<++eras_add_times<<endl;
                
                
				// Rejection-sample a candidate that is neither a community mate of i
				// nor already a neighbour; give up after num_nodes failed attempts.
				int stopper_here=num_nodes;
				int stopper_=0;
				
				int random_mate=irand(num_nodes-1);
				while ( (    (they_are_mate(i, random_mate, member_list)) || E[i].find(random_mate)!=E[i].end())      &&      (stopper_<stopper_here) ) {
					
					random_mate=irand(num_nodes-1);
					stopper_++;
                    
                    
				}
				
				if(stopper_==stopper_here) {	// this shouldn't happen...
                    
					cerr<<"sorry, something went wrong: there is a node which does not respect the constraints. (option -inf)"<<endl;
					return -1;
                    
				}
				
				
				// Insert the new external link into both adjacency sets.
				E[i].insert(random_mate);
				E[random_mate].insert(i);
				
                
                
			}
        
		
	}
    
	//------------------------------------ Erasing links   ------------------------------------------------------
    
	
    
    
	return 0;
	
}
int weights(deque<set<int> > & en, const deque<deque<int> > & member_list, const double beta, const double mu, deque<map <int, double > > & neigh_weigh) {

	// Assigns a weight to every link so that each node i carries total strength
	// strs[i] = degree(i)^beta, of which a fraction (1-mu) should lie on internal
	// (same-community) links and mu on external ones. The assignment is found by
	// iteratively minimising the squared deviation between the "wished" and
	// "factual" strength splits (one propagate() sweep per node per iteration).
	//   en          : adjacency sets of the network
	//   member_list : member_list[i] = communities node i belongs to
	//   beta        : strength-vs-degree exponent
	//   mu          : weight mixing parameter
	//   neigh_weigh : output -- neigh_weigh[i][j] = weight of link i-j
	//                 (assumed empty on entry; one map per node is appended)
	// Returns 0.
	
	// Total strength of the whole network.
	double tstrength=0;
	
	
	for(int i=0; i<en.size(); i++)
		tstrength+=pow(en[i].size(), beta);
		
	
	// NOTE(review): a variable-length array is a GCC extension, not standard C++;
	// a vector<double> would be portable (depends on propagate()'s signature -- confirm).
	double strs[en.size()]; // strength of the nodes
	// build a matrix like this: deque < map <int, double > > each row corresponds to link - weights 

	
	// Initialise every link weight to 0 and precompute each node's strength.
	for(int i=0; i<en.size(); i++) {
		
		map<int, double> new_map;
		neigh_weigh.push_back(new_map);
		
		for (set<int>::iterator its=en[i].begin(); its!=en[i].end(); its++)
			neigh_weigh[i].insert(make_pair(*its, 0.));
		
		strs[i]=pow(double(en[i].size()), beta);
		
	}
	
	
	// Row template: [internal, external, idle] strength of one node.
	deque<double> s_in_out_id_row(3);
	s_in_out_id_row[0]=0;
	s_in_out_id_row[1]=0;
	s_in_out_id_row[2]=0;
	
	
	deque<deque<double> > wished;	// 3 numbers for each node: internal, idle and extra strength. the sum of the three is strs[i]. wished is the theoretical, factual the factual one.
	deque<deque<double> > factual;
	
	
	for (int i=0; i<en.size(); i++) {
		
		wished.push_back(s_in_out_id_row);
		factual.push_back(s_in_out_id_row);
		
	}
	
	
	// tot_var is the objective being minimised: squared distance between the
	// wished and factual strength assignments, summed over all nodes.
	double tot_var=0;
	

	for (int i=0; i<en.size(); i++) {
		
		wished[i][0]=(1. -mu)*strs[i];	// wished internal strength
		wished[i][1]=mu*strs[i];	// wished external strength
		
		factual[i][2]=strs[i];	// initially all strength is idle (all weights are 0)
		
		tot_var+= wished[i][0] * wished[i][0] + wished[i][1] * wished[i][1] + strs[i] * strs[i];
	
	}
	
	
	// Internal degree of every node, consumed by propagate().
	deque<int> internal_kin_top;
	for(int i=0; i<en.size(); i++)
		internal_kin_top.push_back(internal_kin(en, member_list, i));

	
	double precision = 1e-9;	// absolute convergence target (scaled by tstrength^2)
	double precision2 = 1e-2;	// minimum relative improvement per sweep before stopping
	double not_better_than = pow(tstrength, 2) * precision;
	//cout<<"tot_var "<<tot_var<<";\tnot better "<<not_better_than<<endl;
	
	
	int step=0;
	
	// Relaxation loop: sweep propagate() over all nodes until tot_var is small
	// enough or one sweep improves it by less than 1%.
	while (true) {
	
		
		time_t t0=time(NULL);
		
		double pre_var=tot_var;
		
		
		for (int i=0; i<en.size(); i++) 
			propagate(neigh_weigh, member_list, wished, factual, i, tot_var, strs, internal_kin_top);
		
		
		//check_weights(neigh_weigh, member_list, wished, factual, tot_var, strs);
		
		double relative_improvement=double(pre_var - tot_var)/pre_var;		
		//cout<<"tot_var "<<tot_var<<"\trelative improvement: "<<relative_improvement<<endl;

		if (tot_var<not_better_than)
				break;
		
		if (relative_improvement < precision2)
			break;
		
		// Wall-clock time of this sweep (only used by the commented-out ETA message).
		time_t t1= time(NULL);
		int deltat= t1 - t0;
		
		/*
		if(step%2==0 && deltat !=0)		
			cout<<"About "<<cast_int((log(not_better_than) -log(tot_var)) / log(1. - relative_improvement)) * deltat<<" secs..."<<endl;
		*/
		step++;
		
	}
	
	
	//check_weights(neigh_weigh, member_list, wished, factual, tot_var, strs);

	
	return 0;

}
int print_network(deque<set<int> > & E, const deque<deque<int> > & member_list, const deque<deque<int> > & member_matrix, deque<int> & num_seq) {

	// Dumps the generated (unweighted) network to disk and prints summary
	// statistics. Files written: network.dat (edge list, 1-based ids, each
	// undirected edge listed once per endpoint), community.dat (node ->
	// community memberships, 1-based) and statistics.dat (degree,
	// community-size and mixing-parameter histograms). Returns 0.

	const int num_nodes = member_list.size();

	// Endpoint count (twice the number of edges) and the per-node mixing
	// parameter: the fraction of a node's links leaving its communities.
	int endpoint_count = 0;
	deque<double> double_mixing;

	for (int v = 0; v < int(E.size()); v++) {

		double internal_fraction = double(internal_kin(E, member_list, v)) / E[v].size();
		double_mixing.push_back(1. - internal_fraction);
		endpoint_count += E[v].size();
	}

	// Internal link density (p_in) and external density (p_out), averaged
	// over communities.
	double density = 0;
	double sparsity = 0;

	for (int c = 0; c < int(member_matrix.size()); c++) {

		double sum_internal = 0;	// total internal degree inside community c
		double sum_external = 0;	// total external degree leaving community c

		for (int j = 0; j < int(member_matrix[c].size()); j++) {

			int node = member_matrix[c][j];
			double kin = double(internal_kin_only_one(E[node], member_matrix[c]));
			sum_internal += kin;
			sum_external += E[node].size() - kin;
		}

		double internal_pairs = (member_matrix[c].size() * (member_matrix[c].size() - 1));
		double external_pairs = ((num_nodes - member_matrix[c].size()) * (member_matrix[c].size()));

		if (internal_pairs != 0)
			density += sum_internal / internal_pairs;
		if (external_pairs != 0)
			sparsity += sum_external / external_pairs;
	}

	density /= member_matrix.size();
	sparsity /= member_matrix.size();

	// Edge list, 1-based ids.
	ofstream out1("network.dat");
	for (int u = 0; u < int(E.size()); u++)
		for (set<int>::iterator nb = E[u].begin(); nb != E[u].end(); ++nb)
			out1 << u + 1 << "\t" << *nb + 1 << endl;
	out1.close();

	// One line per node: node id followed by its community ids.
	ofstream out2("community.dat");
	for (int v = 0; v < int(member_list.size()); v++) {

		out2 << v + 1 << "\t";
		for (int j = 0; j < int(member_list[v].size()); j++)
			out2 << member_list[v][j] + 1 << " ";
		out2 << endl;
	}
	out2.close();

	cout<<"\n\n---------------------------------------------------------------------------"<<endl;
	cout<<"network of "<<num_nodes<<" vertices and "<<endpoint_count/2<<" edges"<<";\t average degree = "<<double(endpoint_count)/num_nodes<<endl;
	cout<<"\naverage mixing parameter: "<<average_func(double_mixing)<<" +/- "<<sqrt(variance_func(double_mixing))<<endl;
	cout<<"p_in: "<<density<<"\tp_out: "<<sparsity<<endl;

	// Histogram dump.
	ofstream statout("statistics.dat");

	deque<int> degree_seq;
	for (int v = 0; v < int(E.size()); v++)
		degree_seq.push_back(E[v].size());

	statout<<"degree distribution (probability density function of the degree in logarithmic bins) "<<endl;
	log_histogram(degree_seq, statout, 10);
	statout<<"\ndegree distribution (degree-occurrences) "<<endl;
	int_histogram(degree_seq, statout);
	statout<<endl<<"--------------------------------------"<<endl;

	statout<<"community distribution (size-occurrences)"<<endl;
	int_histogram(num_seq, statout);
	statout<<endl<<"--------------------------------------"<<endl;

	statout<<"mixing parameter"<<endl;
	not_norm_histogram(double_mixing, statout, 20, 0, 0);
	statout<<endl<<"--------------------------------------"<<endl;

	statout.close();

	cout<<endl<<endl;

	return 0;

}
int print_network(deque<set<int> > & E, const deque<deque<int> > & member_list, const deque<deque<int> > & member_matrix, 
	deque<int> & num_seq, deque<map <int, double > > & neigh_weigh, double beta, double mu, double mu0) {

	// Weighted version of print_network(): writes the weighted edge list to
	// network.dat ("node \t neighbour \t weight", 1-based ids), the community
	// memberships to community.dat, and the degree / community-size / mixing /
	// weight histograms to statistics.dat; prints summary statistics to cout.
	//   E             : adjacency sets (topology)
	//   member_list   : member_list[i] = communities node i belongs to
	//   member_matrix : member_matrix[c] = nodes of community c
	//   num_seq       : community size sequence (for the size histogram)
	//   neigh_weigh   : neigh_weigh[i][j] = weight of link i-j (built by weights())
	//   beta, mu, mu0 : weight exponent, weight mixing, topological mixing
	// Returns 0.

	int num_nodes=member_list.size();

	// Endpoint count (twice the number of edges) and the per-node topological
	// mixing parameter: fraction of a node's links leaving its communities.
	int edges=0;
	deque<double> double_mixing;
	for (int i=0; i<int(E.size()); i++) {

		double one_minus_mu = double(internal_kin(E, member_list, i))/E[i].size();
		double_mixing.push_back(1.- one_minus_mu);
		edges+=E[i].size();
	}

	// Internal link density (p_in) and external density (p_out), averaged
	// over communities.
	double density=0; 
	double sparsity=0;

	for (int i=0; i<int(member_matrix.size()); i++) {

		double media_int=0;		// total internal degree inside community i
		double media_est=0;		// total external degree leaving community i

		for (int j=0; j<int(member_matrix[i].size()); j++) {

			double kinj = double(internal_kin_only_one(E[member_matrix[i][j]], member_matrix[i]));
			media_int+= kinj;
			media_est+=E[member_matrix[i][j]].size() - kinj;
		}

		double pair_num=(member_matrix[i].size()*(member_matrix[i].size()-1));
		double pair_num_e=((num_nodes-member_matrix[i].size())*(member_matrix[i].size()));

		if(pair_num!=0)
			density+=media_int/pair_num;
		if(pair_num_e!=0)
			sparsity+=media_est/pair_num_e;
	}

	density=density/member_matrix.size();
	sparsity=sparsity/member_matrix.size();

	// Weighted edge list, 1-based ids.
	ofstream out1("network.dat");
	for (int u=0; u<int(E.size()); u++) {

		// BUG FIX: the old code wrote "*(itb++)+1" and then read
		// neigh_weigh[u][*(itb)], i.e. the weight of the *next* neighbour, and
		// dereferenced end() on each node's last edge (undefined behaviour;
		// map::operator[] would also insert a spurious entry for the bad key).
		// Print the weight belonging to the edge actually being written.
		for (set<int>::iterator itb=E[u].begin(); itb!=E[u].end(); ++itb)
			out1<<u+1<<"\t"<<*itb+1<<"\t"<<neigh_weigh[u][*itb]<<endl;
	}
	out1.close();

	// One line per node: node id followed by its community ids
	// (explicitly closed, consistent with the unweighted overload).
	ofstream out2("community.dat");
	for (int i=0; i<int(member_list.size()); i++) {

		out2<<i+1<<"\t";
		for (int j=0; j<int(member_list[i].size()); j++)
			out2<<member_list[i][j]+1<<" ";
		out2<<endl;
	}
	out2.close();

	cout<<"\n\n---------------------------------------------------------------------------"<<endl;
	cout<<"network of "<<num_nodes<<" vertices and "<<edges/2<<" edges"<<";\t average degree = "<<double(edges)/num_nodes<<endl;
	cout<<"\naverage mixing parameter (topology): "<< average_func(double_mixing)<<" +/- "<<sqrt(variance_func(double_mixing))<<endl;
	cout<<"p_in: "<<density<<"\tp_out: "<<sparsity<<endl;

	// Histogram dump.
	ofstream statout("statistics.dat");

	deque<int> degree_seq;
	for (int i=0; i<int(E.size()); i++)
		degree_seq.push_back(E[i].size());

	statout<<"degree distribution (probability density function of the degree in logarithmic bins) "<<endl;
	log_histogram(degree_seq, statout, 10);
	statout<<"\ndegree distribution (degree-occurrences) "<<endl;
	int_histogram(degree_seq, statout);
	statout<<endl<<"--------------------------------------"<<endl;

	statout<<"community distribution (size-occurrences)"<<endl;
	int_histogram(num_seq, statout);
	statout<<endl<<"--------------------------------------"<<endl;

	statout<<"mixing parameter (topology)"<<endl;
	not_norm_histogram(double_mixing, statout, 20, 0, 0);
	statout<<endl<<"--------------------------------------"<<endl;

	// Weighted mixing: for every node, the fraction of its strength carried
	// by external (inter-community) links.
	deque<double> inwij;	// weights of internal links
	deque<double> outwij;	// weights of external links

	// Expected internal/external rescaling factors; only referenced by the
	// commented-out diagnostics below, kept for documentation purposes.
	double csi=(1. - mu) / (1. - mu0);
	double csi2=mu /mu0;
	(void)csi; (void)csi2;

	double tstrength=0;			// total strength of the network
	deque<double> one_minus_mu2;	// per-node weighted mixing parameter

	for(int i=0; i<int(neigh_weigh.size()); i++) {

		double internal_strength_i=0;
		double strength_i=0;

		for(map<int, double>::iterator itm = neigh_weigh[i].begin(); itm!=neigh_weigh[i].end(); itm++) {

			if(they_are_mate(i, itm->first, member_list)) {

				inwij.push_back(itm->second);
				//inkij.push_back(csi * pow(E[i].size(), beta-1));
				internal_strength_i+=itm->second;
			}
			else {

				outwij.push_back(itm->second);
				//outkij.push_back(csi2 * pow(E[i].size(), beta-1));
			}

			tstrength+=itm->second;
			strength_i+=itm->second;
		}

		// Guard against a zero-strength node: the old code divided by zero
		// here and pushed NaN, poisoning the reported averages.
		if (strength_i > 0)
			one_minus_mu2.push_back(1 - internal_strength_i/strength_i);
	}

	//cout<<"average strength "<<tstrength / E.size()<<"\taverage internal strenght: "<<average_internal_strenght<<endl;
	cout<<"\naverage mixing parameter (weights): "<<average_func(one_minus_mu2)<<" +/- "<<sqrt(variance_func(one_minus_mu2))<<endl;	
	statout<<"mixing parameter (weights)"<<endl;
	not_norm_histogram(one_minus_mu2, statout, 20, 0, 0);
	statout<<endl<<"--------------------------------------"<<endl;

	//cout<<" expected internal "<<tstrength * (1 - mu) / E.size()<<endl;
	//cout<<"internal links: "<<inwij.size()<<" external: "<<outwij.size()<<endl;

	cout<<"average weight of an internal link "<<average_func(inwij)<<" +/- "<<sqrt(variance_func(inwij))<<endl;
	cout<<"average weight of an external link "<<average_func(outwij)<<" +/- "<<sqrt(variance_func(outwij))<<endl;

	//cout<<"average weight of an internal link expected "<<tstrength / edges * (1. - mu) / (1. - mu0)<<endl;
	//cout<<"average weight of an external link expected "<<tstrength / edges * (mu) / (mu0)<<endl;

	statout<<"internal weights (weight-occurrences)"<<endl;
	not_norm_histogram(inwij, statout, 20, 0, 0);
	statout<<endl<<"--------------------------------------"<<endl;

	statout<<"external weights (weight-occurrences)"<<endl;
	not_norm_histogram(outwij, statout, 20, 0, 0);
	statout.close();

	cout<<endl<<endl;

	return 0;

}