/**
 * Remove the entry with the given key from the hash map, if present.
 *
 * Hashes @p key to select a bucket, scans that bucket's linked list for an
 * entry whose key is equal per parcObject_Equals(), and removes it via the
 * iterator.  After a successful removal the map may be compressed: when the
 * load factor falls to minLoadFactor or below, the capacity is halved.
 *
 * @param hashMap The map to remove from.
 * @param key The key identifying the entry to remove.
 * @return true if a matching entry was found and removed, false otherwise.
 */
bool
parcHashMap_Remove(PARCHashMap *hashMap, const PARCObject *key)
{
    PARCHashCode keyHash = parcObject_HashCode(key);
    int bucket = keyHash % hashMap->capacity;

    bool result = false;

    if (hashMap->buckets[bucket] != NULL) {
        PARCIterator *iterator = parcLinkedList_CreateIterator(hashMap->buckets[bucket]);
        while (parcIterator_HasNext(iterator)) {
            _PARCHashMapEntry *entry = parcIterator_Next(iterator);
            if (parcObject_Equals(key, entry->key)) {
                parcIterator_Remove(iterator);
                hashMap->size--;
                result = true;
                break;
            }
        }
        parcIterator_Release(&iterator);
    }

    // When expanded by 2 the load factor goes from .75 (3/4) to .375 (3/8), if
    // we compress by 2 when the load factor is .25 (1/4) the load
    // factor becomes .5 (1/2).
    //
    // FIX: only consider shrinking when an entry was actually removed.
    // Previously this check ran unconditionally, so repeated Remove() calls
    // with absent keys on a sparse map (size unchanged, condition still true)
    // would halve the capacity on every call, driving capacity toward zero
    // and making the `keyHash % hashMap->capacity` above a division-by-zero
    // hazard.
    if (result) {
        double loadFactor = (double) hashMap->size / (double) hashMap->capacity;
        if (loadFactor <= (hashMap->minLoadFactor)) {
            _parcHashMap_Resize(hashMap, hashMap->capacity / 2);
        }
    }

    return result;
}
/**
 * Remove the first element of @p list that is equal to @p object,
 * as determined by parcObject_Equals().
 *
 * The list is walked front to back with an iterator; the first match is
 * deleted through the iterator and the scan stops.
 *
 * @param list The sorted list to remove from.
 * @param object The object to match against the list's elements.
 * @return true if a matching element was found and removed, false otherwise.
 */
bool
parcSortedList_Remove(PARCSortedList *list, const PARCObject *object)
{
    bool removed = false;

    PARCIterator *it = parcSortedList_CreateIterator(list);
    while (!removed && parcIterator_HasNext(it)) {
        PARCObject *element = parcIterator_Next(it);
        if (parcObject_Equals(object, element)) {
            parcIterator_Remove(it);
            removed = true;
        }
    }
    parcIterator_Release(&it);

    return removed;
}