// Counts groups of "similar" strings in A: two strings belong to the same
// group when isgroup() (defined elsewhere) says they are similar, and
// similarity is closed transitively.
// Strategy: a first pass links pairs through the `ancestors` map (string ->
// group representative) and the external `group` map (representative -> set
// of members), counting singletons in `res`; a second pass repeatedly calls
// mergegroup() (defined elsewhere) to fuse overlapping groups until stable.
// Returns singleton count + number of merged groups.
int numSimilarGroups(vector<string>& A) {
    int res = 0;
    // Maps each seen string to the representative string of its group.
    unordered_map<string,string> ancestors;
    for(int i=0; i<A.size(); ++i) {
        // NOTE(review): lhs_anc_iter is captured before the inner loop, but
        // the inner loop inserts into `ancestors`; a rehash would invalidate
        // this iterator. Confirm the container cannot rehash here.
        auto lhs_anc_iter = ancestors.find(A[i]);
        for(int j=i+1; j<A.size(); ++j) {
            if(isgroup(A[i], A[j])) {
                if (lhs_anc_iter!=ancestors.end()) {
                    // A[i] already has a representative: attach A[j] to it.
                    group[lhs_anc_iter->second].insert(A[j]);
                    ancestors[A[j]]=lhs_anc_iter->second;
                } else {
                    auto rhs_anc_iter = ancestors.find(A[j]);
                    if (rhs_anc_iter!=ancestors.end()) {
                        // A[j] has a representative: attach A[i] to it.
                        group[rhs_anc_iter->second].insert(A[i]);
                        ancestors[A[i]]=rhs_anc_iter->second;
                    } else {
                        // Neither is grouped yet: A[i] becomes representative.
                        group[A[i]].insert(A[j]);
                        ancestors[A[j]] = A[i];
                    }
                }
            }
        }
        // A[i] matched nothing and belongs to nothing: it is its own group.
        if (group.find(A[i])==group.end() && ancestors.find(A[i]) == ancestors.end()) {
            ++res;
            //output(A[i] + "\n");
        }
    }
    // merge groups
    // Repeatedly fuse groups whose member appears as another group's
    // representative; drop emptied groups; stop when a full pass merges
    // nothing (or only one group remains).
    int merge_cnt = 0;
    while(group.size()>1) {
        merge_cnt =0;
        for(auto iter: group) {
            auto& vec = iter.second;
            for(auto wd: vec) {
                if (mergegroup(iter.first, wd)) merge_cnt++;
            }
        }
        // Erase groups that were emptied by the merge pass.
        for(auto iter=group.begin(); iter!=group.end();) {
            if(iter->second.empty()) iter = group.erase(iter);
            else ++iter;
        }
        if(merge_cnt==0 ) break;
    }
    return res + group.size();
}
// Strict weak ordering for Equation: an equation sorts first when it has
// MORE quantities; ties are broken by the larger extra_weight.
bool operator<(const Equation& other)const {
    //return quant.size()-extra_weight>other.quant.size()-other.extra_weight;
    // Primary key: quantity count, descending.
    if (quant.size() != other.quant.size())
        return quant.size() > other.quant.size();
    // Tie-break: extra_weight, descending.
    return extra_weight > other.extra_weight;
}
// Fills every cell of the users x items utility matrix with
// FLOAT_NONE_VALUE (the project's "no rating" sentinel, defined elsewhere).
// items / users: name -> index maps; only their sizes are used here.
//
// BUG FIX: utility_matrix was taken BY VALUE, so this function filled a
// local copy and had no observable effect on the caller's matrix. It is now
// taken by reference.
// NOTE(review): any prototype of this function declared elsewhere must be
// updated to the reference parameter as well.
void init_utility_matrix(const unordered_map<string, size_t> &items,
                         const unordered_map<string, size_t> &users,
                         vector<vector<float>> &utility_matrix) {
    for (size_t user = 0; user < users.size(); user++) {
        for (size_t item = 0; item < items.size(); item++)
            utility_matrix[user][item] = FLOAT_NONE_VALUE;
    }
}
// Returns a stable integer id for string s, assigning the next id
// (== the current map size) the first time s is seen.
// IMPROVEMENT: the original did a find() followed by an insert() -- two
// hash lookups. insert() is a no-op that returns the existing entry when
// the key is already present, so a single call handles both cases; the
// value argument is evaluated before insertion, so mp.size() is the
// pre-insert size, exactly as required.
int id_of(std::string &s, std::unordered_map<std::string, int> &mp) {
    auto result = mp.insert({s, (int)mp.size()});
    return result.first->second;
}
// Reads and validates the ASCII header of a binary little-endian PLY file.
// data: open file positioned at the start of the header.
// parameters: the incoming value is discarded (reset below); the map is
//   rebuilt from the header's "property" lines as name -> column index.
// Returns the rebuilt parameter map. Also initializes the members
// m_timeStep, m_nColumns, m_nParticles and m_chunkLength.
// Throws the magic error code 20 on any malformed header.
unordered_map<string, int> LoadParticles::read_plyBinaryHeader(FILE *data, unordered_map<string, int> parameters) {
    parameters = {};  // ignore whatever the caller passed in
    string line = readBinaryLine(data);  // readBinaryLine: project helper reading one header line
    if (line != "ply") {
        cerr << "Error: loading corrupt ply-file:";
        throw 20;
    }
    line = readBinaryLine(data);
    if (line != "format binary_little_endian 1.0") {
        cerr << "Error: error loading binary ply-file:";
        throw 20;
    }
    int position = 0;    // next column index to assign to a "property"
    int nParticles = 0;  // vertex count from the "element vertex N" line
    // NOTE(review): if "end_header" never appears, this loop's termination
    // depends entirely on readBinaryLine's EOF behavior -- confirm.
    while (line != "end_header") {
        line = readBinaryLine(data);
        vector<string> lineSplit;
        boost::trim_if(line, boost::is_any_of("\t "));
        boost::split(lineSplit, line, boost::is_any_of("\t "), boost::token_compress_on);
        if (lineSplit[0] == "comment") continue;
        if (lineSplit[0] == "element") {
            if (lineSplit[1] == "vertex") {
                nParticles = stoi(lineSplit[2]);
            } else {
                cerr << "Error: only vertex element supported in ply file" << endl;
                cerr << "found: " << lineSplit[1] << endl;
                throw 20;
            }
        }
        if (lineSplit[0] == "property") {
            if (lineSplit[1] == "double") {
                // Column order follows property order in the header.
                parameters[lineSplit[2]] = position;
                position++;
            } else {
                cerr << "Error: in ply 'property'' only 'double' is supported" << endl;
                cerr << "found: " << lineSplit[1] << endl;
                throw 20;
            }
        }
    }
    // Derived bookkeeping for the binary payload that follows the header.
    m_timeStep = 0;
    m_nColumns = parameters.size();
    m_nParticles = nParticles;
    m_chunkLength = nParticles * parameters.size();
    return parameters;
}
// Sliding-window scan (step = one word length) starting at offset `start`:
// appends to the global `ans` every index where a window contains each word
// of `words` exactly as often as the global target map `m` requires.
// Globals (defined elsewhere): m  = required count per word,
//                              m1 = counts inside the current window,
//                              ans = collected start indices.
void solve(int start, string &s, vector<string> &words) {
    //m stores info, m1 stores current info
    unordered_map<string, int>::iterator it;
    // p1 = window start, p2 = scan position, len = word length,
    // cur = number of distinct words currently at their required count.
    int p1 = start, p2 = start, len = words[0].length(), cur = 0;
    m1.clear();
    while(p2 + len - 1 < s.length()) {
        // Grow the window until every word is satisfied (cur == m.size()).
        while(cur != m.size() && p2 + len - 1 < s.length()) {
            string tmp = s.substr(p2, len);
            it = m.find(tmp);
            if(it == m.end()) {
                // Not a target word: no window can span it; restart after it.
                p2 += len; p1 = p2; cur = 0; m1.clear();
                continue;
            } else if(it->second == m1[tmp]) {
                // tmp already at its quota: shrink from the left until one
                // occurrence of tmp has been dropped from the window.
                string tmp2 = "";
                while(p1 != p2 && tmp2 != tmp) {
                    tmp2 = s.substr(p1, len);
                    if(m1.find(tmp2) != m1.end()) {
                        if(m1[tmp2] == m[tmp2]) cur--;
                        m1[tmp2]--;
                    }
                    p1 += len;
                }
                continue;
            }
            m1[tmp]++;
            if(it->second == m1[tmp]) cur++;  // word just reached its quota
            p2 += len;
        }
        // Window satisfies all words: record start indices while sliding the
        // left edge forward one word at a time until it no longer does.
        while(cur == m.size() && p1 <= p2) {
            ans.push_back(p1);
            string tmp = s.substr(p1, len);
            it = m1.find(tmp);
            if(it == m1.end()) {
                p1 += len;
                continue;
            }
            it->second--;
            if(it->second < m[tmp]) cur--;  // dropped below quota
            p1 += len;
        }
    }
}
// Serializes the picked-item and level-timer tables into a hand-built JSON
// string and writes it to the named pipe `hPipe`.
// Globals/members used (defined elsewhere): p_D2LAUNCH_BnData (Diablo II
// account/character info), _pickedItems, _timers, hPipe.
void RunThread2() {
    //int gameDiff = D2CLIENT_GetDifficulty();
    int gameDiff = 1;  // difficulty is currently hard-coded; see disabled call above
    BnetData* pData = (*p_D2LAUNCH_BnData);
    char* accountNameChar = pData->szAccountName;
    char* charName = pData->szPlayerName; //GetCharName();
    std::string accountNameCharString(accountNameChar);
    std::string charNameCharString(charName);
    // NOTE(review): JSON is assembled by concatenation -- names containing
    // quotes or backslashes would corrupt it; confirm inputs are safe.
    string jsonData = "{\"data\":{\"AccountName\":\"" + accountNameCharString + "\",\"CharName\":\"" + charNameCharString + "\",";
    jsonData += "\"GameDiff\":\"" + std::to_string(gameDiff) + "\",\"CompressedData\":[";
    unordered_map<unsigned int, unsigned __int64>::const_iterator it;
    for (it = _pickedItems.begin(); it != _pickedItems.end(); it++) {
        jsonData += "{\"ItemKey\":\"" + std::to_string((*it).first) + "\", \"ItemValue\":\"" + std::to_string((*it).second) + "\"},";
    }
    if (_pickedItems.size() > 0) {
        jsonData.pop_back(); // remove last ,
    }
    jsonData += "], \"Timers\":[";
    unordered_map<int, int>::const_iterator it2;
    for (it2 = _timers.begin(); it2 != _timers.end(); it2++) {
        jsonData += "{\"LvlNo\":\"" + std::to_string((*it2).first) + "\", \"Time\":\"" + std::to_string((*it2).second) + "\"},";
    }
    if (_timers.size() > 0) {
        jsonData.pop_back(); // remove last ,
    }
    jsonData += "]}}\n";
    //_oldItems.clear();
    //SetCharName(""); // will be reset in core loop the f**k is this
    DWORD dwWrite, dwRead;
    // NOTE(review): the WriteFile result (flg) and dwRead are unused, so
    // write failures are silently ignored.
    bool flg = WriteFile(hPipe, jsonData.c_str(), jsonData.size(), &dwWrite, NULL);
}
// Verifies that the freshly collected class/version map
// (unique_classes_current_) matches the recorded reference map
// (unique_classes_): same size, same keys, same values.
void TestSchemaEvolution::runComparison() {
    CPPUNIT_ASSERT(unique_classes_current_.size() == unique_classes_.size());
    // const& avoids copying each map entry per iteration.
    for (const auto& cl : unique_classes_) {
        std::cout << "Checking " << cl.first << " " << cl.second << std::endl;
        //std::cout << "unique_classes_current_.insert(std::make_pair(\"" << cl.first << "\", " << cl.second << "));" << std::endl;
        // BUG FIX: the original looked the key up in unique_classes_ -- the
        // very map being iterated -- which is trivially true and verified
        // nothing. The membership test must target unique_classes_current_.
        CPPUNIT_ASSERT(unique_classes_current_.find(cl.first) != unique_classes_current_.end());
        CPPUNIT_ASSERT(unique_classes_[cl.first] == unique_classes_current_[cl.first]);
    }
}
// Reads TC test cases of "name party club [club ...]" lines and assigns one
// member to represent each club via max-flow:
//   source -> club (cap 1) -> member (cap 1) -> party (cap 1)
//   party -> target (cap (numClubs - 1) / 2)   [limits any party's seats]
// Prints one "member club" line per club, or "Impossible." when the flow
// cannot cover every club. Globals (defined elsewhere): name_set /
// party_set / club_set (string -> node id via get()), graph, visited, N,
// source(), target(), maxflow().
int main() {
    // freopen("in.txt","r",stdin);
    string name,party,club;
    char ins[100],*ptr;
    int TC,tmp,ans;
    // NOTE(review): gets() has no bounds check and was removed from the
    // standard -- fgets() would be the safe replacement.
    TC = atoi(gets(ins));
    gets(ins);  // consume the blank line after the test-case count
    while(TC--) {
        // Reset per-case state.
        name_set.clear();
        party_set.clear();
        club_set.clear();
        memset(graph,0,sizeof(graph));
        N = 1;
        // One input line per person, terminated by a blank line / EOF.
        while(gets(ins) && strlen(ins)) {
            ptr = strtok(ins," ");
            name = string(ptr);
            ptr = strtok(NULL," ");
            party = string(ptr);
            graph[get(name_set,name)][get(party_set,party)] = 1;  // member -> party
            while(ptr = strtok(NULL," ")) {  // assignment intended: iterate clubs
                club = string(ptr);
                graph[source()][get(club_set,club)] = 1;            // source -> club
                graph[get(club_set,club)][get(name_set,name)] = 1;  // club -> member
            }
        }
        // Each party may seat at most a minority of the council.
        tmp = (club_set.size() - 1) / 2;
        for(unordered_map<string,int>::iterator it = party_set.begin();it != party_set.end();it++)
            graph[it->second][target()] = tmp;
        ans = 0;
        visited.reset();
        // Accumulate augmenting flow until none remains.
        while(tmp = maxflow(source(),INT_MAX)) {
            ans += tmp;
            visited.reset();
        }
        if(ans != club_set.size()) printf("Impossible.\n");
        else
            // A positive member->club entry is the residual of used
            // club->member flow, i.e. the chosen assignment -- presumably;
            // depends on maxflow()'s residual bookkeeping (defined elsewhere).
            for(unordered_map<string,int>::iterator it_name = name_set.begin();it_name != name_set.end();it_name++)
                for(unordered_map<string,int>::iterator it_club = club_set.begin();it_club != club_set.end();it_club++)
                    if(graph[it_name->second][it_club->second]) {
                        printf("%s %s\n",(it_name->first).c_str(),(it_club->first).c_str());
                        break;  // each member represents at most one club
                    }
        if(TC) printf("\n");
    }
    return 0;
}
// LRU cache write: inserts or updates `key`, making it the most recently
// used entry. Members (defined elsewhere): hmap (key -> ListNode*),
// capacity, end (least recently used node), deleteNode() (unlink),
// setHead() (link at MRU position).
void set(int key, int value) {
    //If the node info exists in the map, refresh the value of the node
    //Swap the node from the original place to the head of the list
    if (hmap.find(key) != hmap.end()){
        ListNode *existNode = hmap[key];
        existNode->val = value;
        deleteNode(existNode);  // unlink from its current position...
        setHead(existNode);     // ...and re-insert at the MRU end
    }
    //else, create a new node in the list, and update the map as well
    else{
        ListNode *createNode = new ListNode(key, value);
        //If the size of hash map exceeds the capacity of the cache,
        //delete key of the end, and add key of the newly created node to hash map
        //Meanwhile, remove the end node from the list, and insert the new node to the list
        if (hmap.size() >= capacity){
            hmap.erase(end->key);
            // NOTE(review): if deleteNode() only unlinks, the evicted
            // ListNode is never freed and leaks -- confirm. `end` must also
            // be updated by deleteNode() for the next eviction to work.
            deleteNode(end);
            setHead(createNode);
        }
        //else, add the newly created node directly to the list
        else setHead(createNode);
        hmap[key] = createNode;
    }
}
void set(int key, int value) { if (get(key) != -1) { hashMap[key]->val = value; return; } if (hashMap.size() == capacity) { Node *old = head->next; hashMap.erase(old->key); old->next->prev = head; head->next = old->next; old->next = nullptr; old->prev = nullptr; } Node *newNode = new Node(key, value); hashMap[key] = newNode; newNode->prev = tail->prev; newNode->next = tail; newNode->prev->next = newNode; tail->prev = newNode; return; }
// LFU cache write. Members (defined elsewhere): db (key -> {value, freq}),
// cache (freq -> list of keys, most recent at front), itcache (key ->
// iterator into its freq list), minfreq (lowest frequency present),
// n (capacity).
void put(int key, int value) {
    auto it = db.find(key);
    auto iter = itcache.find(key);
    if(it!=db.end()) {
        // Key present: bump its frequency and move it to the new freq list.
        (it->second.second)++;
        it->second.first = value;
        int newfreq = it->second.second;
        int curfreq = newfreq-1;
        cache[curfreq].erase(iter->second);
        cache[newfreq].push_front(key);
        itcache[key] = cache[newfreq].begin();
        // NOTE(review): minfreq is not advanced here even when
        // cache[curfreq] becomes empty -- presumably compensated elsewhere
        // (e.g. in get()); confirm.
        return;
    }
    if(itcache.size() == n) {
        // Full: evict the least recently used key of the lowest frequency.
        // NOTE(review): operator[] creates an empty list if minfreq is
        // absent, and this early return then silently drops the insert.
        if(cache[minfreq].size() == 0) return;
        int keytodel = cache[minfreq].back();
        cache[minfreq].pop_back();
        itcache.erase(keytodel);
        db.erase(keytodel);
    }
    // A fresh key always starts at frequency 1, which becomes the minimum.
    minfreq = 1;
    db[key] = make_pair(value, 1);
    cache[1].push_front(key);
    itcache[key] = cache[1].begin();
}
// Scores how well font `fontname` covers the character set in ch_map
// (character -> occurrence count).
// raw_score (out): number of DISTINCT characters the font covers.
// ch_flags (out, optional): per-character coverage flags in ch_map
//   iteration order (unordered -- not stable across runs).
// Returns the total occurrence count of all covered characters.
/* static */ int FontUtils::FontScore(const unordered_map<char32, inT64>& ch_map,
                                      const string& fontname, int* raw_score,
                                      vector<bool>* ch_flags) {
    PangoFontInfo font_info;
    if (!font_info.ParseFontDescriptionName(fontname)) {
        tprintf("ERROR: Could not parse %s\n", fontname.c_str());
        // NOTE(review): execution continues with a default-constructed
        // font_info after a parse failure -- confirm that is intended.
    }
    PangoFont* font = font_info.ToPangoFont();
    PangoCoverage* coverage = pango_font_get_coverage(font, NULL);
    // NOTE(review): neither `font` nor `coverage` is unreferenced before
    // returning -- possible Pango object leak; confirm ownership rules.
    if (ch_flags) {
        ch_flags->clear();
        ch_flags->reserve(ch_map.size());
    }
    *raw_score = 0;
    int ok_chars = 0;
    for (unordered_map<char32, inT64>::const_iterator it = ch_map.begin();
         it != ch_map.end(); ++it) {
        // Whitespace counts as covered regardless of the font.
        bool covered = (IsWhitespace(it->first) ||
                        (pango_coverage_get(coverage, it->first) == PANGO_COVERAGE_EXACT));
        if (covered) {
            ++(*raw_score);          // one per distinct covered character
            ok_chars += it->second;  // weighted by occurrence count
        }
        if (ch_flags) {
            ch_flags->push_back(covered);
        }
    }
    return ok_chars;
}
// Returns every distinct subset of the multiset described by mapNums
// (value -> multiplicity), starting with the empty subset.
// NOTE: mapNums is consumed -- entries are erased as the recursion peels
// them off, so the map is empty when the call returns.
vector<vector<int>> subsetsWithDup(unordered_map<int, int>& mapNums) {
    vector<vector<int>> result = {{}};
    if (mapNums.empty()) return result;

    // Peel one distinct value off together with its multiplicity.
    auto first = mapNums.begin();
    const int value = first->first;
    const int multiplicity = first->second;
    mapNums.erase(first);

    // All subsets of the remaining multiset (includes the empty subset).
    vector<vector<int>> rest = subsetsWithDup(mapNums);

    // Runs of the peeled value: {v}, {v,v}, ..., up to its multiplicity.
    vector<vector<int>> runs;
    for (int len = 1; len <= multiplicity; ++len)
        runs.push_back(vector<int>(len, value));

    // 1) Non-empty subsets that omit the value entirely.
    for (const auto& sub : rest)
        if (!sub.empty()) result.push_back(sub);

    // 2) Each run on its own.
    for (const auto& run : runs)
        result.push_back(run);

    // 3) A non-empty rest-subset followed by a run (run-major order, to
    //    reproduce the original enumeration order exactly).
    for (const auto& run : runs) {
        for (const auto& sub : rest) {
            if (sub.empty()) continue;
            vector<int> combined(sub);
            combined.insert(combined.end(), run.begin(), run.end());
            result.push_back(combined);
        }
    }
    return result;
}
// LRU cache write. Members (defined elsewhere): m_itemMap (key -> iterator
// into m_items), m_items (list of Item in recency order: front = least
// recently used, back = most recently used), m_capacity, Promote() (moves
// an item to the list tail).
void set(int key, int value) {
    auto itItemMap = m_itemMap.find(key);
    if (itItemMap != m_itemMap.end()) {
        // The item with this key exists in the
        // cache, so promote the item to the list
        // tail. Note that itItemMap->second is
        // updated with the new value.
        Promote(itItemMap->second);
        // Set the value.
        itItemMap->second->value = value;
    } else {
        // Check whether the cache has used all
        // its capacity.
        if (m_itemMap.size() >= m_capacity) {
            // Erase the least recently used item.
            m_itemMap.erase(m_items.front().key);
            m_items.pop_front();
        }
        // Insert the item into the list and the key-to-list-iterator
        // pair into the map.
        m_itemMap.insert(make_pair(
            key, m_items.insert(m_items.end(), Item(key, value))));
    }
}
int gen(int m, int n, pair<int, int>& p, unordered_map<string, vector<string>>& source, unordered_map<string, string>& refer) { string pos = to_string(p.first) + "#" + to_string(p.second); refer[pos] = pos; source[pos] = vector<string>(1, pos); vector<string> List; if (p.first > 0) { string tempPos = to_string(p.first-1) + "#" + to_string(p.second); List.push_back(tempPos); } if (p.first < m-1) { string tempPos = to_string(p.first+1) + "#" + to_string(p.second); List.push_back(tempPos); } if (p.second > 0) { string tempPos = to_string(p.first) + "#" + to_string(p.second-1); List.push_back(tempPos); } if (p.second < n-1) { string tempPos = to_string(p.first) + "#" + to_string(p.second+1); List.push_back(tempPos); } for (string l : List) comb(source, refer, l, pos); return source.size(); }
// Serializes an unordered_map as: element count, then each key/value pair,
// delegating to the existing Write overloads for size_t, TKey and TVal.
// NOTE(review): unordered_map iteration order is unspecified, so the byte
// stream is not canonical across runs/implementations -- confirm readers
// do not rely on a fixed pair order.
void Write(const unordered_map<TKey, TVal> &val) {
    Write(val.size());  // prefix with the number of pairs
    for (const auto& item : val) {
        Write(item.first);
        Write(item.second);
    }
}
// LRU cache write with a sentinel head (head->next = least recently used).
// Members (defined elsewhere): key_node_table (key -> Node*), cap, head,
// appendTail() (links a node at the most recently used end).
void set(int key, int value) {
    if (key_node_table.count(key)) { //found in the cache
        Node* node = key_node_table[key];
        node->value = value;
        key_node_table[key] = node;  // NOTE(review): redundant -- the map already holds this pointer
        // Unlink from the current position, then re-append as most recent.
        node->prev->next = node->next;
        node->next->prev = node->prev;
        appendTail(node);
        return;
    } else { //not in the cache
        if (key_node_table.size() == cap) { //if full, remove one
            //remove head (the least recently used node)
            Node* tmp = head->next;
            head->next = head->next->next;
            head->next->prev = head;
            key_node_table.erase(tmp->key);
            // NOTE(review): tmp is unlinked but never deleted -- the evicted
            // node leaks; confirm.
        }
        //else if not full, skip removing
        Node* node = new Node(key, value);
        appendTail(node);
        key_node_table[key] = node;
    }
}
// Rewrites column references inside a join predicate so they address
// positions in the joined row layout: left-child columns keep their
// position, right-child columns are shifted right by the left schema width.
// exp: (sub)expression to rewrite; lm / rm: qualified column name -> column
// expression for the left / right child schemas; ltable / rtable: table
// names (unused in this body).
// Side effects: appends matched columns to the members `left` and `right`.
// NOTE(review): lm and rm are passed BY VALUE, so every recursive call
// copies both maps -- a const reference would avoid that (the declaration
// elsewhere would need the same change).
void JoinOperator::updateExpression(Expression *exp, unordered_map<string, Expression*> lm, unordered_map<string, Expression*> rm , string ltable, string rtable) {
    ExprType t = exp->getType();
    if(t != ExprType::COLEXPRESSION && t != ExprType::DOUBLEVALUEEXPRESSION && t != ExprType::STRINGVALUEEXPRESSION && t != ExprType::LONGVALUEEXPRESSION && t != ExprType::DATEVALUEEXPRESSION){
        // Not a leaf: recurse into both operands of the binary expression.
        BinaryExpression* bexp = (BinaryExpression*)exp;
        updateExpression(bexp->getLeftExpression(), lm, rm, ltable, rtable);
        updateExpression(bexp->getRightExpression(), lm, rm, ltable, rtable);
    } else if(t == COLEXPRESSION){
        ColExpression* col = (ColExpression*)exp;
        if(lm.find(col->getQualifiedName()) != lm.end()){
            // Left-side column: copy type and position from the left schema.
            ColExpression* e = (ColExpression*)lm[col->getQualifiedName()];
            col->setType(e->getDataType());
            col->setColPos(e->getColPos());
            left.push_back(col);
        } else if(rm.find(col->getQualifiedName()) != rm.end()){
            // Right-side column: shift its position past the left schema,
            // and keep an unshifted copy for the right child.
            ColExpression* e = (ColExpression*)rm[col->getQualifiedName()];
            col->setType(e->getDataType());
            col->setColPos(e->getColPos() + (int)lm.size());
            ColExpression *rcol = new ColExpression(col->getQualifiedName(), col->getColPos()-(int)lm.size(), col->getDataType());
            right.push_back(rcol);
        } else {
            // Unknown column: report and leave the expression untouched.
            std::cout << "column : " << col->getQualifiedName() << " not found in any schema " << std::endl;
        }
    }
}
// LRU cache write using sentinel QNode members `front` (MRU side) and
// `rear` (LRU side) stored by value. keyAddress maps key -> QNode*.
void set(int key, int value) {
    if (capacity == 0) {
        return ;  // zero-capacity cache stores nothing
    }
    unordered_map<int,QNode*>::iterator itr;
    QNode *tmp;
    itr = keyAddress.find(key);
    if (itr == keyAddress.end()) {
        // New key: allocate a node; it is linked at the front further below.
        tmp = new QNode(key, value);
        keyAddress.insert(pair<int, QNode*>(key, tmp));
    } else {
        // Existing key: update the value and unlink the node so it can be
        // re-linked at the front (most recently used position).
        tmp = itr->second;
        tmp->value = value;
        tmp->pre->next = tmp->next;
        tmp->next->pre = tmp->pre;
    }
    if (keyAddress.size() > capacity) {
        // Over capacity (only possible after inserting a new key): evict
        // rear.pre, the least recently used node. rear.next is used as
        // scratch storage for the node being deleted.
        keyAddress.erase(rear.pre->key);
        rear.next = rear.pre;
        rear.pre = rear.pre->pre;
        rear.pre->next = &rear;
        delete rear.next;
        rear.next = NULL;
    }
    // Link tmp directly behind the front sentinel (most recently used).
    tmp->next = front.next;
    tmp->next->pre = tmp;
    front.next = tmp;
    tmp->pre = &front;
}
/*Sets the key x with value y in the LRU cache */
// Members (defined elsewhere): m (key -> node*), cap (capacity), last
// (least recently used node), remove() (unlink), setHead() (push as MRU).
void LRUCache::set(int x, int y) {
    if(m.count(x)==1) {
        // Key present: update in place and promote to most recently used.
        node *temp = m[x];
        temp->value = y;
        remove(temp);
        setHead(temp);
    } else {
        node* n = new node();
        n->key = x;
        n->value = y;
        n->next = n->prev = NULL;
        if(m.size()>=cap) {
            // Full: evict the least recently used entry before inserting.
            // NOTE(review): the evicted node is unlinked and dropped from
            // the map but not visibly deleted here -- confirm remove()
            // frees it (otherwise it leaks) and updates `last`.
            m.erase(last->key);
            remove(last);
            setHead(n);
        } else {
            setHead(n);
        }
        m[x] = n;
    }
}
// Writes the map's size followed by each KEY to the binary stream, using
// the string overload of write() (defined elsewhere).
// NOTE(review): the double values are never written -- only the key set is
// serialized; confirm this is intentional (e.g. values rebuilt on load).
// NOTE(review): sizeof(size_t) is platform-dependent, so the format is not
// portable between 32- and 64-bit builds.
void write(ofstream& fout, const unordered_map<string, double>& imMap) {
    size_t imNum = imMap.size();
    fout.write((char *)&imNum, sizeof(size_t));
    for (const auto& kv : imMap)
        write(fout, kv.first);
}
// Serializes the help-wanted jobs board to lbfile: the next job id, the
// number of jobs, then for each job its header fields followed by its step
// list. save_primitive<T> (defined elsewhere) writes one binary value.
void save_help_wanted(fstream& lbfile) {
    save_primitive<int>(lbfile, ai_detail::next_job_id);
    // NOTE(review): size() (size_t) is narrowed to int here and below.
    save_primitive<int>(lbfile, jobs_board.size());
    for (auto it = jobs_board.begin(); it != jobs_board.end(); ++it) {
        // Job header.
        save_primitive<int>(lbfile, it->second.type);
        save_primitive<int>(lbfile, it->second.job_id);
        save_primitive<uint8_t>(lbfile, it->second.current_step);
        save_primitive<int>(lbfile, it->second.assigned_to);
        save_primitive<int>(lbfile, it->second.steps.size());
        // Per-step payload, in order.
        for (auto steps = it->second.steps.begin(); steps != it->second.steps.end(); ++steps) {
            save_primitive<int>(lbfile, steps->type);
            save_primitive<int16_t>(lbfile, steps->target_x);
            save_primitive<int16_t>(lbfile, steps->target_y);
            save_primitive<uint8_t>(lbfile, steps->target_z);
            save_primitive<int>(lbfile, steps->component_id);
            save_primitive<bool>(lbfile, steps->requires_skill);
            save_primitive<string>(lbfile, steps->skill_name);
            save_primitive<int>(lbfile, steps->placeholder_structure_id);
            save_primitive<char>(lbfile, steps->required_skill_difficulty);
        }
    }
}
// LRU cache write over a map whose entries are doubly linked BY KEY: each
// entry stores the keys of its `older` and `younger` neighbours; members
// o_key / y_key track the oldest / youngest keys. get() (defined elsewhere)
// promotes a key to youngest; print() dumps state when `debug` is set.
void set(int key, int value) {
    if (cache.empty()){
        // First entry: it is simultaneously the oldest and the youngest.
        o_key=key;
        y_key=key;
        cache[key].value=value;
        if (debug) print();
        return;
    }
    if (cache.count(key)>0){ //already present
        cache[key].value=value; //update value
        // Reuse get() purely for its promote-to-youngest side effect,
        // temporarily silencing debug output.
        bool temp_debug=debug;
        debug=0;
        get(key);
        debug=temp_debug;
    }else{
        cache[key].value=value; //insert entry
        // Link the new entry behind the current youngest.
        cache[key].older=y_key;
        cache[y_key].younger=key;
        y_key=key;
        if (cache.size()>capacity){
            //delete the oldest one
            // Advance o_key to the second-oldest first, then erase the
            // previous oldest (still reachable as cache[o_key].older).
            o_key=cache[o_key].younger;
            cache.erase(cache[o_key].older);
            // NOTE(review): cache[o_key].older still names the erased key
            // afterwards -- harmless only if never dereferenced; confirm.
        }
    }
    if (debug) print();
}
// Scores a (word, error) pair under a naive-Bayes-style model:
// error-model probability scaled by a smoothed word frequency.
// word_err_list: carries the error type and the two characters involved.
// name: the candidate word. Globals (defined elsewhere): word_list
// (word -> count), err_arr (error-type x char x char table), tot_word_count.
double get_score(error_pairs word_err_list,string name) {
    double prob = 1.0;
    // NOTE: operator[] inserts a 0 entry for unseen words as a side effect.
    int word_count = word_list[name];
    /*
     * Lets assume a naive bayes setting
     */
    // incorporate smoothing ...
    char i,j,k;
    i = word_err_list.err_type;  // used directly as err_arr's first index
    j = word_err_list.p1;
    k = word_err_list.p2;
    if(j == '@') {
        // '@' maps to slot 26, past 'a'..'z' -- presumably a word-boundary
        // marker; confirm against how err_arr is populated.
        j = 26;
    } else {
        j = j-'a';
    }
    k = k - 'a';
    // Add-0.5 smoothing on the word count, normalized by vocabulary size
    // plus total word count.
    prob = (err_arr[i][j][k])*(0.5 + word_count)/(double)(word_list.size() + tot_word_count); //smoothing here
    return prob;
}
// Caution!! // Karger's Algorithm requires choosing one edge uniformly at random. // However, I first choose a node uniformly at random, and // then choose an edge for that node uniformly at random // (I did this so I could keep just one adjList and no other data structure) // // Due to this, I am most likely NOT choosing an overall edge uniformly at random. // Algorithm still works fine though. Edge MinCut::pickEdge(const unordered_map<Node, Bucket>& myAdjList) { //pick node assert(myAdjList.size() > 0); int alIndex = std::rand() % (myAdjList.size()); auto alIt = myAdjList.begin(); for (int i=0; i<alIndex; i++) {alIt++;} //pick outgoing edge from this node int nedges = (alIt->second).size(); assert(nedges > 0); int eIndex = std::rand() % nedges; auto ndIt = (alIt->second).begin(); for (int i=0; i<eIndex; i++) {ndIt++;} Edge ret = make_pair(alIt->first, *ndIt); return ret; }
// Returns the smallest index in [1, tot] that is absent from `cut` when
// both guard conditions hold; otherwise returns 0.
int go2(int last, int tot, int all, unordered_map<int, vector<pii> > &cut) {
    // Guards: (tot - 1 - |cut|) must be odd -- note the subtraction is
    // carried out in size_t, exactly as in the original -- and bit `last`
    // must be set in the mask `all`.
    const bool oddRemainder = ((tot - 1 - cut.size()) % 2) != 0;
    const bool bitSet = (all & (1 << last)) != 0;
    if (!oddRemainder || !bitSet) return 0;

    // First index not present as a key in `cut`.
    for (int candidate = 1; candidate <= tot; ++candidate)
        if (!cut.count(candidate))
            return candidate;
    return 0;
}
set<string> *LanguageRecognizer::recognize(string line) { #ifdef DEBUG cout << "Got " << knowsyourlanguage.size() << " elements in trigram list" << endl; cout << "Recognizing string :" << endl << line << endl; #endif stringstream linestream(line,ios::in); return recognize((istream *)&linestream); }
// Karger-style contraction: contracts random edges until only two vertices
// remain, then returns the number of surviving edges (one cut estimate --
// callers typically take the minimum over many runs).
// Globals (defined elsewhere): _vertices, _edges, random_contradiction().
// NOTE(review): srand(time(NULL)) reseeds on EVERY call with one-second
// resolution, so calls within the same second replay identical random
// sequences; consider seeding once at program start.
int try_min_cut() {
    srand(time(NULL));
    while (_vertices.size() > 2) {
        random_contradiction();  // contracts one randomly chosen edge
    }
    return _edges.size();  // NOTE: size_t narrowed to int
}
//------------------------------------------------------------------------------ void Particle::connectionsPD_data( unordered_map <int, PD_connectionData> connPD_data) { _connectionsPD_data = connPD_data; _initialNumberOfBonds = connPD_data.size(); _damage = (double)_connectionsPD_data.size(); if(_initialNumberOfBonds == 0) _initialNumberOfBonds = 1; }