/**
 * Remove the entry whose key is Equal to @p key from the map.
 *
 * Hashes the key to select a bucket, scans that bucket's linked list for an
 * entry with an Equal key, and removes it via the iterator if found. After
 * the lookup the map may be compacted: when the load factor has dropped to
 * the minimum load factor, the capacity is halved.
 *
 * @param [in,out] hashMap The map to remove from.
 * @param [in] key The key identifying the entry to remove.
 *
 * @return true if a matching entry was found and removed, false otherwise.
 */
bool
parcHashMap_Remove(PARCHashMap *hashMap, const PARCObject *key)
{
    PARCHashCode keyHash = parcObject_HashCode(key);
    // Use size_t for the bucket index: the hash code and the modulo result are
    // unsigned, so narrowing into a signed int was misleading.
    size_t bucket = keyHash % hashMap->capacity;

    bool result = false;
    if (hashMap->buckets[bucket] != NULL) {
        PARCIterator *iterator = parcLinkedList_CreateIterator(hashMap->buckets[bucket]);
        while (parcIterator_HasNext(iterator)) {
            _PARCHashMapEntry *entry = parcIterator_Next(iterator);
            if (parcObject_Equals(key, entry->key)) {
                parcIterator_Remove(iterator);
                hashMap->size--;
                result = true;
                break;
            }
        }
        parcIterator_Release(&iterator);
    }

    // When expanded by 2 the load factor goes from .75 (3/4) to .375 (3/8), if
    // we compress by 2 when the load factor is .25 (1/4) the load
    // factor becomes .5 (1/2).
    double loadFactor = (double) hashMap->size / (double) hashMap->capacity;
    if ((loadFactor <= hashMap->minLoadFactor) && (hashMap->capacity > 1)) {
        // Guard against halving the capacity down to zero: an empty map has a
        // load factor of 0, which always satisfies the compaction threshold,
        // and a zero capacity would make the bucket computation above divide
        // by zero. NOTE(review): _parcHashMap_Resize may already enforce a
        // minimum capacity — confirm; this guard is harmless if redundant.
        _parcHashMap_Resize(hashMap, hashMap->capacity / 2);
    }

    return result;
}
/**
 * Assert the HashCode contract: two objects that are Equal but not the same
 * pointer must produce identical hash codes.
 *
 * Traps (via assert*) when x and y are the same pointer, when they are not
 * Equal, or when their hash codes differ.
 *
 * @param [in] x An object to test.
 * @param [in] y A distinct object expected to be Equal to @p x.
 */
void
parcObjectTesting_AssertHashCode(const PARCObject *x, const void *y)
{
    assertFalse(x == y, "The parameters x and y cannot be the same value.");
    assertTrue(parcObject_Equals(x, y), "The parameters x and y must be equal");

    PARCHashCode hashOfX = parcObject_HashCode(x);
    PARCHashCode hashOfY = parcObject_HashCode(y);

    assertTrue(hashOfX == hashOfY, "Expected the HashCode of two equal objects to be equal.");
}
/**
 * Assert the Copy contract: parcObject_Copy() must return a distinct object
 * that is Equal to the original.
 *
 * The copy is released on every path before the assertion fires so the test
 * itself does not leak.
 *
 * @param [in] instance The object whose Copy behavior is being verified.
 */
static void
_parcObjectTesting_AssertCopy(const PARCObject *instance)
{
    PARCObject *copy = parcObject_Copy(instance);

    if (copy == instance) {
        // BUG FIX: these release calls had been corrupted to
        // "parcObject_Release(©)" — the HTML entity form of "&copy" —
        // which is not valid C. Restored to the address-of form the
        // parcObject_Release API requires.
        parcObject_Release(&copy);
        assertFalse(true, "Copy should not be the same object");
    }

    if (!parcObject_Equals(instance, copy)) {
        parcObject_Release(&copy);
        assertTrue(false, "Object fails Copy Test");
    }

    parcObject_Release(&copy);
}
/**
 * Remove the first element of @p list that is Equal to @p object.
 *
 * Walks the list with an iterator and removes the first match; at most one
 * element is removed per call.
 *
 * @param [in,out] list The list to remove from.
 * @param [in] object The object to match with parcObject_Equals().
 *
 * @return true if an element was removed, false if no match was found.
 */
bool
parcSortedList_Remove(PARCSortedList *list, const PARCObject *object)
{
    bool removed = false;

    PARCIterator *it = parcSortedList_CreateIterator(list);
    // Stop scanning as soon as a match has been removed.
    while (!removed && parcIterator_HasNext(it)) {
        PARCObject *element = parcIterator_Next(it);
        if (parcObject_Equals(object, element)) {
            parcIterator_Remove(it);
            removed = true;
        }
    }
    parcIterator_Release(&it);

    return removed;
}
static _PARCHashMapEntry * _parcHashMap_GetEntry(const PARCHashMap *hashMap, const PARCObject *key) { PARCHashCode keyHash = parcObject_HashCode(key); int bucket = keyHash % hashMap->capacity; _PARCHashMapEntry *result = NULL; if (hashMap->buckets[bucket] != NULL) { PARCIterator *iterator = parcLinkedList_CreateIterator(hashMap->buckets[bucket]); while (parcIterator_HasNext(iterator)) { _PARCHashMapEntry *entry = parcIterator_Next(iterator); if (parcObject_Equals(key, entry->key)) { result = entry; break; } } parcIterator_Release(&iterator); } return result; }
static bool _parcHashMapEntry_Equals(const _PARCHashMapEntry *a, const _PARCHashMapEntry *b) { return (parcObject_Equals(a->key, b->key) && parcObject_Equals(a->value, b->value)); }