void LocaleKeyFactory::updateVisibleIDs(Hashtable & result, UErrorCode & status) const {
    // Mirror this factory's supported IDs into `result`: publish them when the
    // factory is visible, withdraw them when it is hidden.
    const Hashtable * ids = getSupportedIDs(status);
    if (ids == NULL) {
        return;
    }
    const UBool hidden = (_coverage & 0x1) != 0;
    int32_t iterPos = 0;
    for (const UHashElement * e = ids->nextElement(iterPos); e != NULL; e = ids->nextElement(iterPos)) {
        const UnicodeString & id = *((const UnicodeString *)e->key.pointer);
        if (hidden) {
            result.remove(id);
        } else {
            // The value is a dummy non-void marker; the table is used with set semantics.
            result.put(id, (void *)this, status);
            if (U_FAILURE(status)) {
                break;
            }
        }
    }
}
/** * Remove a source-target/variant from the specDAG. */ void TransliteratorRegistry::removeSTV(const UnicodeString& source, const UnicodeString& target, const UnicodeString& variant) { // assert(source.length() > 0); // assert(target.length() > 0); UErrorCode status = U_ZERO_ERROR; Hashtable *targets = (Hashtable*) specDAG.get(source); if (targets == NULL) { return; // should never happen for valid s-t/v } uint32_t varMask = targets->geti(target); if (varMask == 0) { return; // should never happen for valid s-t/v } int32_t variantListIndex = variantList.indexOf((void*) &variant, 0); if (variantListIndex < 0) { return; // should never happen for valid s-t/v } int32_t remMask = 1 << variantListIndex; varMask &= (~remMask); if (varMask != 0) { targets->puti(target, varMask, status); } else { targets->remove(target); // should delete variants if (targets->count() == 0) { specDAG.remove(source); // should delete targets } } }
void SimpleFactory::updateVisibleIDs(Hashtable& result, UErrorCode& status) const {
    // Publish our single ID when visible; otherwise make sure it is absent.
    if (!_visible) {
        result.remove(_id);
        return;
    }
    result.put(_id, (void*)this, status); // cast away const
}
void SimpleLocaleKeyFactory::updateVisibleIDs(Hashtable& result, UErrorCode& status) const {
    if (U_FAILURE(status)) {
        return;
    }
    // A set low coverage bit means "invisible": withdraw the ID instead of
    // publishing it.
    const UBool invisible = (_coverage & 0x1) != 0;
    if (invisible) {
        result.remove(_id);
    } else {
        result.put(_id, (void*)this, status);
    }
}
// Builtin hash-remove command: deletes the given key from the hashtable in
// args[0] and returns that hashtable handle.  Expects exactly two arguments:
// the table and the key.
Handle_ptr hashremoveCommand( CBuiltinAdapter *adapter, Context &ctx, Environment *env, std::vector<Handle_ptr> args)
{
    if (args.size() != 2) {
        throw ArgumentCountException( 2, __FILE__, __LINE__);
    }
    Hashtable *table = asHashtable( args[0]);
    std::string keyText = args[1]->stringValue();
    table->remove( keyText);
    return args[0];
}
/** * Remove a source-target/variant from the specDAG. */ void TransliteratorRegistry::removeSTV(const UnicodeString& source, const UnicodeString& target, const UnicodeString& variant) { // assert(source.length() > 0); // assert(target.length() > 0); // UErrorCode status = U_ZERO_ERROR; Hashtable *targets = (Hashtable*) specDAG.get(source); if (targets == 0) { return; // should never happen for valid s-t/v } UVector *variants = (UVector*) targets->get(target); if (variants == 0) { return; // should never happen for valid s-t/v } variants->removeElement((void*) &variant); if (variants->size() == 0) { targets->remove(target); // should delete variants if (targets->count() == 0) { specDAG.remove(source); // should delete targets } } }
// Create a new local object for model `szModel` from the attribute map passed
// (by address) in `hash`: inserts the attributes into the model's data table
// and, for synchronized sources, records "create" rows in changed_values.
// NOTE(review): `hash` is a Hashtable<String,String>* smuggled through an
// unsigned long; the caller owns it and it is mutated here ("source_id" and
// "object" keys are added).
void rho_syncclient_create_object(const char* szModel, unsigned long hash)
{
    Hashtable<String, String>& hashObject = *((Hashtable<String, String>*)hash);
    String src_name = szModel;

    // Look up the source (model) metadata: id, partition, schema flag, sync type.
    DBResult( res, db::CDBAdapter::getUserDB().executeSQL("SELECT source_id, partition, schema, sync_type from sources WHERE name=?", src_name) );
    if ( res.isEnd())
    {
        //TODO: report error - unknown source
        return;
    }

    String update_type = "create";
    int nSrcID = res.getIntByIdx(0);
    // Reuse a caller-supplied object id when present, otherwise generate one.
    String obj = hashObject.containsKey("object") ? hashObject.get("object") : rhom_generate_id();
    String db_partition = res.getStringByIdx(1);
    bool isSchemaSrc = res.getStringByIdx(2).length() > 0;        // non-empty schema => fixed-schema source with its own table
    bool isSyncSrc = res.getStringByIdx(3).compare("none") != 0;  // sync_type "none" => local-only source
    String tableName = isSchemaSrc ? src_name : "object_values";
    db::CDBAdapter& db = db::CDBAdapter::getDB(db_partition.c_str());

    hashObject.put("source_id", convertToStringA(nSrcID));
    hashObject.put("object", obj);

    db.startTransaction();

    // Schema sources store the whole attribute map as a single row
    // ("source_id" excluded).
    if ( isSchemaSrc )
        db_insert_into_table(db, tableName, hashObject, "source_id");

    if ( isSyncSrc || !isSchemaSrc )
    {
        for ( Hashtable<String,String>::iterator it = hashObject.begin(); it != hashObject.end(); ++it )
        {
            String key = it->first;
            String val = it->second;
            if ( rhom_method_name_isreserved(key) )
                continue;

            // add rows excluding object, source_id and update_type
            Hashtable<String,String> fields;
            fields.put("source_id", convertToStringA(nSrcID));
            fields.put("object", obj);
            fields.put("attrib", key);
            fields.put("value", val);
            fields.put("update_type", update_type);
            // Blob attributes are tagged so the sync engine treats the value as a file path.
            if ( db::CDBAdapter::getDB(db_partition.c_str()).getAttrMgr().isBlobAttr(nSrcID, key.c_str()) )
                fields.put( "attrib_type", "blob.file");

            if ( isSyncSrc )
                db_insert_into_table(db, "changed_values", fields);

            fields.remove("update_type");
            fields.remove("attrib_type");

            // Property-bag sources store one attrib/value row per attribute.
            if ( !isSchemaSrc )
                db_insert_into_table(db, tableName, fields);
        }
    }

    db.endTransaction();
}
// Save (create-or-update) the object described by the attribute map in `hash`
// for model `szModel`.  If no row with the map's "object" id exists yet, the
// object is created via rho_syncclient_create_object; otherwise each changed
// attribute is written back and, for synchronized sources, mirrored into
// changed_values for upload.
// NOTE(review): `hash` is a Hashtable<String,String>* passed as unsigned long.
void rho_syncclient_save( const char* szModel, unsigned long hash )
{
    Hashtable<String, String>& hashObject = *((Hashtable<String, String>*)hash);
    String src_name = szModel;

    // Source (model) metadata: id, partition, schema flag, sync type.
    DBResult( res, db::CDBAdapter::getUserDB().executeSQL("SELECT source_id, partition, schema, sync_type from sources WHERE name=?", src_name) );
    if ( res.isEnd())
    {
        //TODO: report error - unknown source
        return;
    }

    int nSrcID = res.getIntByIdx(0);
    String obj = hashObject.get("object");
    String db_partition = res.getStringByIdx(1);
    bool isSchemaSrc = res.getStringByIdx(2).length() > 0;        // fixed-schema source has its own table
    bool isSyncSrc = res.getStringByIdx(3).compare("none") != 0;  // "none" => local-only source
    String tableName = isSchemaSrc ? src_name : "object_values";
    db::CDBAdapter& db = db::CDBAdapter::getDB(db_partition.c_str());

    db.startTransaction();

    String update_type = "";
    String sql;
    Vector<String> arValues;
    // Probe for an existing row with this object id.
    if (isSchemaSrc)
    {
        sql = "SELECT object FROM " + tableName + " WHERE object=? LIMIT 1 OFFSET 0";
        arValues.addElement(obj);
    } else
    {
        sql = "SELECT object FROM " + tableName + " WHERE object=? AND source_id=? LIMIT 1 OFFSET 0";
        arValues.addElement(obj);
        arValues.addElement(convertToStringA(nSrcID));
    }
    DBResult( res1, db.executeSQLEx(sql.c_str(), arValues ) );
    if (!res1.isEnd())
    {
        // Object exists: carry forward a pending (unsent) update_type if one
        // is queued, else treat this as a plain "update".
        if (isSyncSrc)
        {
            DBResult( resUpdateType, db.executeSQL( "SELECT update_type FROM changed_values WHERE object=? and source_id=? 
and sent=?", obj, nSrcID, 0 ) );
            if (!resUpdateType.isEnd())
                update_type = resUpdateType.getStringByIdx(0);
            else
                update_type = "update";
        }else
            update_type = "update";
    } else
    {
        // Object does not exist yet: delegate to the create path.
        rho_syncclient_create_object(szModel, hash);
    }

    // NOTE(review): `res` (the sources query) was already checked non-end
    // above, so this condition is always true here — presumably it was meant
    // to guard the update path only; verify against the original intent.
    if (!res.isEnd())
    {
        // Load the currently stored attribute map to diff against.
        // NOTE(review): `item` looks heap-allocated by rhom_load_item_by_object
        // and is not freed here — confirm ownership/lifetime with that helper.
        unsigned long item = rhom_load_item_by_object( db, src_name, nSrcID, obj, isSchemaSrc);
        Hashtable<String, String>& hashItem = *((Hashtable<String, String>*)item);

        for ( Hashtable<String,String>::iterator it = hashObject.begin(); it != hashObject.end(); ++it )
        {
            String key = it->first;
            String val = it->second;
            if ( rhom_method_name_isreserved(key) )
                continue;

            // add rows excluding object, source_id and update_type
            Hashtable<String,String> fields;
            fields.put("source_id", convertToStringA(nSrcID));
            fields.put("object", obj);
            fields.put("attrib", key);
            fields.put("value", val);
            fields.put("update_type", update_type);
            // Blob attributes are tagged so the sync engine treats the value as a file path.
            if ( db::CDBAdapter::getDB(db_partition.c_str()).getAttrMgr().isBlobAttr(nSrcID, key.c_str()) )
                fields.put( "attrib_type", "blob.file");

            if ( hashItem.containsKey(key) )
            {
                // Attribute already stored: write only if the value changed.
                bool isModified = hashItem.get(key) != val;
                if (isModified)
                {
                    if (isSyncSrc)
                    {
                        // Replace any pending unsent change for this attribute,
                        // preserving its original update_type.
                        DBResult( resUpdateType, db.executeSQL( "SELECT update_type FROM changed_values WHERE object=? and attrib=? and source_id=? and sent=?", obj, key, nSrcID, 0 ) );
                        if (!resUpdateType.isEnd())
                        {
                            fields.put("update_type", resUpdateType.getStringByIdx(0) );
                            db.executeSQL( "DELETE FROM changed_values WHERE object=? and attrib=? and source_id=? and sent=?", obj, key, nSrcID, 0 );
                        }
                        db_insert_into_table(db, "changed_values", fields);
                    }

                    if ( isSchemaSrc )
                        db.executeSQL( (String("UPDATE ") + tableName + " SET " + key + "=? WHERE object=?").c_str(), val, obj );
                    else
                        db.executeSQL( "UPDATE object_values SET value=? WHERE object=? and attrib=? 
and source_id=?", val, obj, key, nSrcID );
                }
            }else
            {
                // New attribute for an existing object.
                if (isSyncSrc)
                    db_insert_into_table(db, "changed_values", fields);

                fields.remove("update_type");
                fields.remove("attrib_type");

                if (isSchemaSrc)
                    db.executeSQL( (String("UPDATE ") + tableName + " SET " + key + "=? WHERE object=?").c_str(), val, obj );
                else
                    db_insert_into_table(db, tableName, fields);
            }
        }
    }

    db.endTransaction();
}
int main() { const unsigned int START_SIZE = 32768; unsigned int largest_size = START_SIZE * pow(2,10); unsigned int elems_tested = START_SIZE * 16; //unsigned int current_size = START_SIZE; //unsigned int old_size = 0; // int max_size = START_SIZE^(INCREMENT_FACTOR*number_of_trials); // for our outputting of the results ofstream ofs("results.txt"); // this is going to hold the measurements vector<recorder<timer> > stats(number_of_algorithms); // The "U" is the type for the queues x and y (poorly named, i know). Using the largest sequence multiplied by factor to allocate memory //EMAQueue<U> x(current_size); cout << "________"; for (int i = 0; i < number_of_algorithms; ++i) cout << headings[i]; cout << endl; cout << " Range "; for (int i = 0; i < number_of_algorithms; ++i) cout << "| Time "; cout << endl; //initialize vector of ints vector<int> testVector; //initialize vector of keys vector<int> keyVector; //initialize random stuff std::random_device rd; // obtain a random number from hardware std::mt19937 eng(rd()); // seed the generator std::uniform_int_distribution<> distr(START_SIZE, largest_size); // define the range for (unsigned int i = 0; i < elems_tested; ++i) { testVector.push_back(distr(eng)); keyVector.push_back(testVector[i]%101); } for (int count = 0; count < number_of_trials; count ++) { //displays the number of elements that will be added to the data structures cout << setw(8) << 1+ largest_size - START_SIZE << flush; ofs << setw(8) << 1+ largest_size - START_SIZE; //resets stats for (int i = 0; i < number_of_algorithms; ++i) stats[i].reset(); //start of testing for (int j = 0; j < number_of_trials; ++j) { //initialize data structures each trial Hashtable<unsigned int> emHash; unordered_map<unsigned int, unsigned int> stlMap; HashMap sepChain; hashdict<unsigned int, unsigned int> bookHash(elems_tested, -1); //does test for each algorithm for (int i = 0; i < number_of_algorithms; ++i) { //resets timer timer1.restart(); //completes the test 
"current_size" times for (unsigned int k = 0; k < elems_tested; ++k) { //data type operations to be tested switch (i) { //insert values to Emily's Hash case 0: emHash.insert(testVector[k]); //emHash.insert(k); break; case 1: stlMap.insert(make_pair(keyVector[k], testVector[k])); //stlMap.insert(k, k); break; /* case 2: //sepChain.insert(testVector[k]); //sepChain.insert(k); break; case 3: //bookHash.insert(keyVector[k], testVector[k]); //bookHash.insert(k, k); break; */ case 2: emHash.remove(testVector[k]); //emHash.remove(k); break; case 3: stlMap.erase(testVector[k]); //stlMap.erase(k); /* break; case 6: //sepChain.remove(testVector[k]); //sepChain.remove(k); break; case 7: bookHash.removeAny(); */ } } //stops timer timer1.stop(); //records stats stats[i].record(timer1); } //cout << "insert: " << START_SIZE << "to: " << largest_size << endl; } // end of trials loop for (int i = 0; i < number_of_algorithms; ++i) { //outputs results console stats[i].report(cout); //outputs results to file stats[i].report(ofs); } cout << endl; ofs << endl; //delete vector testVector.clear(); keyVector.clear(); largest_size = largest_size/2; //repopulate with smaller distribution std::uniform_int_distribution<> distr(START_SIZE, largest_size); // define the range for (unsigned int m = 0; m < elems_tested; ++m) { testVector.push_back(distr(eng)); keyVector.push_back(testVector[m]%101); } } return 0; }