Example #1
void getNB(HashGraph* G, int v, int distance, hash_set<int>& result, vector<bool>& mark)
{
	assert(distance==1 || distance==2);
	
	EdgeMap* p_neighbors=G->getNeighbors(v);
	EdgeMap::iterator pnb;
	for (pnb=p_neighbors->begin(); pnb!=p_neighbors->end(); pnb++)
	{
		UINT w=pnb->first;
		if(!mark[w])
			result.insert(w);
	}
	
	if(distance==1)
		return;
	
	hash_set<int>::iterator p;
	vector<int> temp;
	for(p=result.begin(); p!=result.end(); p++)
		temp.push_back(*p);
	int imnb_size=result.size();
	//result.clear();
	for(int i=0; i<imnb_size; i++)
	{
		p_neighbors=G->getNeighbors(temp[i]);
		for (pnb=p_neighbors->begin(); pnb!=p_neighbors->end(); pnb++)
		{
			UINT w=pnb->first;
			if(!mark[w]) 
				result.insert(w); // the set automatically handles duplicates
		}
	}
	for(int i=0; i<imnb_size; i++)
		result.erase(temp[i]);
}
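For comparison, here is a self-contained sketch of the same two-hop expansion written against the standard containers; the adjacency-list Graph alias is only a stand-in for HashGraph/EdgeMap, not the original classes.

#include <cassert>
#include <iostream>
#include <unordered_set>
#include <vector>

// Minimal stand-in for HashGraph: node id -> list of neighbor ids.
using Graph = std::vector<std::vector<int>>;

// Collect the neighbors of v at up to `distance` hops (1 or 2),
// skipping nodes that are already marked; for distance 2, keep only the 2-hop ring.
void getNB(const Graph& G, int v, int distance,
           std::unordered_set<int>& result, const std::vector<bool>& mark) {
    assert(distance == 1 || distance == 2);

    for (int w : G[v])
        if (!mark[w]) result.insert(w);          // 1-hop neighbors

    if (distance == 1) return;

    std::vector<int> firstHop(result.begin(), result.end());
    for (int u : firstHop)
        for (int w : G[u])
            if (!mark[w]) result.insert(w);      // the set drops duplicates

    for (int u : firstHop) result.erase(u);      // remove the 1-hop layer
}

int main() {
    Graph G = {{1, 2}, {0, 3}, {0, 4}, {1}, {2}};
    std::unordered_set<int> nb;
    std::vector<bool> mark(G.size(), false);
    mark[0] = true;                              // exclude the start node itself
    getNB(G, 0, 2, nb, mark);
    for (int w : nb) std::cout << w << " ";      // prints 3 and 4 (in some order)
    std::cout << "\n";
}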
Example #2
// Print every element of the set. Note that retrieve()/next() are not part of the
// SGI hash_set interface, so this assumes a custom hash_set class that provides them.
void print(hash_set &S) {
  iterator_t p = S.begin();
  while(p!=S.end()) {
    cout << S.retrieve(p) << " ";
    p = S.next(p);
  }
  cout << endl;
}
Example #3
void LinearSNNClassifier::checkgradRowSparse(const vector<Example>& examples, mat& Wd, const mat& gradWd, const string& mark, int iter,
        const hash_set<int>& sparseRowIndexes, const mat& ft) {
    //Random randWdRowcheck = new Random(iter + "Row".hashCode() + hash));
    int charseed = mark.length();
    for (int i = 0; i < mark.length(); i++) {
        charseed = (int) (mark[i]) * 5 + charseed;
    }
    srand(iter + charseed);
    std::vector<int> idRows, idCols;
    idRows.clear();
    idCols.clear();
    if (sparseRowIndexes.empty()) {
        for (int i = 0; i < Wd.n_rows; ++i)
            idRows.push_back(i);
    } else {
        hash_set<int>::iterator it;
        for (it = sparseRowIndexes.begin(); it != sparseRowIndexes.end(); ++it)
            idRows.push_back(*it);
    }

    for (int idx = 0; idx < Wd.n_cols; idx++)
        idCols.push_back(idx);

    random_shuffle(idRows.begin(), idRows.end());
    random_shuffle(idCols.begin(), idCols.end());

    int check_i = idRows[0], check_j = idCols[0];

    double originValue = Wd(check_i, check_j);

    Wd(check_i, check_j) = originValue + 0.001;  // perturb the checked weight upwards
    double lossAdd = 0.0;
    for (int i = 0; i < examples.size(); i++) {
        Example oneExam = examples[i];
        lossAdd += computeScore(oneExam);
    }

    Wd(check_i, check_j) = originValue - 0.001;  // perturb the checked weight downwards
    double lossSub = 0.0;
    for (int i = 0; i < examples.size(); i++) {
        Example oneExam = examples[i];
        lossSub += computeScore(oneExam);
    }

    // central difference: (loss at w + 0.001 minus loss at w - 0.001) over 2 * 0.001,
    // additionally divided by ft(check_i, check_j) and averaged over the examples
    double mockGrad = (lossAdd - lossSub) / (0.002 * ft(check_i, check_j));
    mockGrad = mockGrad / examples.size();
    double computeGrad = gradWd(check_i, check_j);

    printf("Iteration %d, Checking gradient for %s[%d][%d]:\t", iter, mark.c_str(), check_i, check_j);
    printf("mock grad = %.18f, computed grad = %.18f\n", mockGrad, computeGrad);

    Wd(check_i, check_j) = originValue;  // restore the original weight
}
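The check above is the usual central-difference estimate, f'(w) ~ (f(w + eps) - f(w - eps)) / (2 * eps), evaluated at one randomly chosen weight. A self-contained toy version of the idea (the quadratic loss below is purely illustrative, not part of the original code):

#include <cstdio>

int main() {
    // Toy loss f(w) = (w - 3)^2 with analytic gradient 2 * (w - 3).
    auto loss = [](double w) { return (w - 3.0) * (w - 3.0); };
    auto grad = [](double w) { return 2.0 * (w - 3.0); };

    double w = 1.5;
    const double eps = 0.001;                    // same step as the snippet above

    // Central difference: perturb up, perturb down, divide by 2 * eps.
    double mockGrad = (loss(w + eps) - loss(w - eps)) / (2.0 * eps);
    double computedGrad = grad(w);

    printf("mock grad = %.18f, computed grad = %.18f\n", mockGrad, computedGrad);
    return 0;
}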
Example #4
int main(){
    
    // `tab` is presumably a global hash_set<int> declared elsewhere in the original file.
    for(int i=0;i<100;i++){
        tab.insert(2*i + 1);
    }
    for(hash_set<int>::iterator iter = tab.begin();
            iter!=tab.end();
            iter++){
        cout<<(*iter)<<" ";
    }
    cout<<endl;

    return 0;
}
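hash_set is the old SGI/pre-standard extension; since C++11 the same program is normally written with std::unordered_set. A direct equivalent, with tab declared in the snippet so it compiles on its own:

#include <iostream>
#include <unordered_set>

std::unordered_set<int> tab;   // plays the role of the global hash_set<int> above

int main() {
    for (int i = 0; i < 100; i++)
        tab.insert(2 * i + 1);                   // the first 100 odd numbers

    for (int x : tab)                            // unordered: no particular order
        std::cout << x << " ";
    std::cout << std::endl;

    return 0;
}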
Example #5
void Query::mapNodes(vector<Neighborhood>& nbs, HashGraph* Gdb, OrthologInfoList* pOrthinfolist_db, hash_set<int>& Sq, hash_set<int>& Sdb, GraphMatch* gm, Queue& Q, hash_set<int>& nodesInQ, vector<bool>& mark, vector<bool>& dbmark)
{
	hash_set<int>::iterator p;
	hash_map< int, vector<int>, inthash > lmap_q, lmap_db;
	// Bucket the query-side nodes in Sq by ortholog label (label 0 entries are skipped).
	for(p=Sq.begin(); p!=Sq.end(); p++)
	{
		for(unsigned int i=0; i<(*pOrthinfolist)[*p].size(); i++)
		{
			if((*pOrthinfolist)[*p][i]==0)
				continue;
			lmap_q[(*pOrthinfolist)[*p][i]].push_back(*p);
		}
	}
	
	// Bucket the unmarked database-side nodes in Sdb the same way.
	for(p=Sdb.begin(); p!=Sdb.end(); p++)
	{
		if(!dbmark[*p])
		{
			for(unsigned int i=0; i<(*pOrthinfolist_db)[*p].size(); i++)
			{
				if((*pOrthinfolist_db)[*p][i]==0)
					continue;
				lmap_db[(*pOrthinfolist_db)[*p][i]].push_back(*p);
			}
		}
	}
		
	// For every label that occurs on both sides, hand the two node buckets to mapNodesHelp.
	hash_map<int, vector<int>, inthash>::iterator iter, iter2;
	for(iter=lmap_q.begin(); iter!=lmap_q.end(); iter++)
	{
		iter2=lmap_db.find(iter->first);
		if(iter2==lmap_db.end())
			continue;
		else
			mapNodesHelp(nbs, Gdb, pOrthinfolist_db, iter->second, iter2->second, gm, Q, nodesInQ, mark, dbmark);	
	}
}
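Stripped of the graph-matching details, mapNodes is a group-and-join: bucket each side's nodes by label, then visit only the labels that occur on both sides. A minimal sketch of that pattern with std::unordered_map (the node/label pairs here are made-up input, not data from the original program):

#include <iostream>
#include <unordered_map>
#include <vector>

using LabelMap = std::unordered_map<int, std::vector<int>>;

// Bucket node ids by label, skipping the "no label" sentinel 0.
LabelMap groupByLabel(const std::vector<std::pair<int, int>>& nodeLabels) {
    LabelMap m;
    for (auto [node, label] : nodeLabels)
        if (label != 0) m[label].push_back(node);
    return m;
}

int main() {
    LabelMap q  = groupByLabel({{1, 7}, {2, 9}, {3, 0}});     // query side
    LabelMap db = groupByLabel({{10, 9}, {11, 5}, {12, 9}});  // database side

    // Visit only labels present on both sides, like the lmap_q / lmap_db loop above.
    for (const auto& [label, qNodes] : q) {
        auto it = db.find(label);
        if (it == db.end()) continue;
        std::cout << "label " << label << ": " << qNodes.size()
                  << " query node(s) vs " << it->second.size() << " db node(s)\n";
    }
    return 0;
}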
Example #6
int trim_entropy_filter(vector<string>* keep_words, hash_set<string>& cad_words_set, WordInfoMap& wordinfo_map)
{
    // Keep a candidate word only if it has an entry in wordinfo_map, is short enough,
    // and its WordInfo says it should be kept.
    keep_words->reserve(cad_words_set.size());
    for (hash_set<string>::iterator it = cad_words_set.begin(); it != cad_words_set.end(); ++it) {
        WordInfoMap::iterator it_map = wordinfo_map.find(*it);
        if (it_map == wordinfo_map.end()) {
            fprintf(stderr, "WARNING, word[%s] in cad_word, not in word_info\n", it->c_str());
            continue;
        }
        if (it_map->first.size() <= WORD_LEN - 4 && it_map->second.calc_is_keep()) {
            keep_words->push_back(*it);
        }
    }
    return 0;
}
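The filter itself is a lookup-and-predicate pass over a set of candidates. A condensed, self-contained sketch with the standard containers; the WordInfo struct and its keep flag below are placeholders for the real WordInfo/calc_is_keep(), and the length check is omitted:

#include <cstdio>
#include <string>
#include <unordered_map>
#include <unordered_set>
#include <vector>

struct WordInfo { bool keep; };                  // placeholder for the real WordInfo

int main() {
    std::unordered_set<std::string> candidates = {"alpha", "beta", "gamma"};
    std::unordered_map<std::string, WordInfo> info = {
        {"alpha", {true}}, {"beta", {false}}};   // "gamma" is deliberately missing

    std::vector<std::string> keepWords;
    keepWords.reserve(candidates.size());
    for (const auto& w : candidates) {
        auto it = info.find(w);
        if (it == info.end()) {                  // candidate without word info
            fprintf(stderr, "WARNING, word[%s] in cad_word, not in word_info\n", w.c_str());
            continue;
        }
        if (it->second.keep) keepWords.push_back(w);
    }
    for (const auto& w : keepWords) printf("%s\n", w.c_str());  // prints only "alpha"
    return 0;
}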