PARCHashMap *
parcHashMap_Put(PARCHashMap *hashMap, const PARCObject *key, const PARCObject *value)
{
    // When expanded by 2, the load factor goes from .75 (3/4) to .375 (3/8);
    // if we compress by 2 when the load factor is .25 (1/4), the load factor
    // becomes .5 (1/2).
    double loadFactor = (double) hashMap->size / (double) hashMap->capacity;
    if (loadFactor >= hashMap->maxLoadFactor) {
        _parcHashMap_Resize(hashMap, hashMap->capacity * 2);
    }

    _PARCHashMapEntry *entry = _parcHashMap_GetEntry(hashMap, key);
    if (entry != NULL) {
        // The key is already present: replace the stored value in place.
        if (entry->value != value) {
            parcObject_Release(&entry->value);
            entry->value = parcObject_Acquire(value);
        }
    } else {
        // New key: create an entry and append it to its hash bucket,
        // creating the bucket's list on first use.
        entry = _parcHashMapEntry_Create(key, value);

        PARCHashCode keyHash = parcObject_HashCode(key);
        size_t bucket = keyHash % hashMap->capacity;
        if (hashMap->buckets[bucket] == NULL) {
            hashMap->buckets[bucket] = parcLinkedList_Create();
        }
        parcLinkedList_Append(hashMap->buckets[bucket], entry);
        hashMap->size++;
        // The list acquired its own reference to the entry; drop ours.
        _parcHashMapEntry_Release(&entry);
    }

    return hashMap;
}
static void
_parcHashMap_Resize(PARCHashMap *hashMap, size_t newCapacity)
{
    if (newCapacity < hashMap->minCapacity) {
        return;
    }

    PARCLinkedList **newBuckets = parcMemory_AllocateAndClear(newCapacity * sizeof(PARCLinkedList *));
    if (newBuckets == NULL) {
        // Allocation failed; leave the map at its current capacity.
        return;
    }

    // Rehash every entry into the new bucket array, then release the old
    // bucket lists (parcLinkedList_Append acquires its own references, so
    // releasing an old list does not destroy the moved entries).
    for (size_t i = 0; i < hashMap->capacity; i++) {
        if (hashMap->buckets[i] != NULL) {
            if (!parcLinkedList_IsEmpty(hashMap->buckets[i])) {
                PARCIterator *elementIt = parcLinkedList_CreateIterator(hashMap->buckets[i]);
                while (parcIterator_HasNext(elementIt)) {
                    _PARCHashMapEntry *entry = parcIterator_Next(elementIt);
                    PARCHashCode keyHash = parcObject_HashCode(entry->key);
                    size_t newBucket = keyHash % newCapacity;
                    if (newBuckets[newBucket] == NULL) {
                        newBuckets[newBucket] = parcLinkedList_Create();
                    }
                    parcLinkedList_Append(newBuckets[newBucket], entry);
                }
                parcIterator_Release(&elementIt);
            }
            parcLinkedList_Release(&hashMap->buckets[i]);
        }
    }

    PARCLinkedList **cleanupBuckets = hashMap->buckets;
    hashMap->buckets = newBuckets;
    hashMap->capacity = newCapacity;
    parcMemory_Deallocate(&cleanupBuckets);
}
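/*
 * A minimal usage sketch for the put/resize path above, illustrative only.
 * It assumes the public PARCHashMap API (parcHashMap_Create, parcHashMap_Get,
 * parcHashMap_Release), PARCBuffer keys and values, LongBow's assertTrue, and
 * the usual libparc header layout; the key and value strings are hypothetical.
 */
#include <parc/algol/parc_HashMap.h>
#include <parc/algol/parc_Buffer.h>
#include <LongBow/runtime.h>

static void
_exampleHashMapPut(void)
{
    PARCHashMap *map = parcHashMap_Create();

    PARCBuffer *key = parcBuffer_WrapCString("name");
    PARCBuffer *value = parcBuffer_WrapCString("value");

    // Put acquires its own references; a second Put with the same key
    // replaces the stored value in place (see parcHashMap_Put above).
    parcHashMap_Put(map, key, value);

    const PARCObject *stored = parcHashMap_Get(map, key);
    assertTrue(stored != NULL, "Expected the key to be present after Put");

    parcBuffer_Release(&key);
    parcBuffer_Release(&value);
    parcHashMap_Release(&map);
}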
PARCSortedList *
parcSortedList_CreateCompare(PARCSortedListEntryCompareFunction compare)
{
    PARCSortedList *result = parcObject_CreateInstance(PARCSortedList);
    if (result != NULL) {
        result->list = parcLinkedList_Create();
        result->compare = compare;
    }
    return result;
}
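/*
 * A sketch of how the comparator-taking constructor above might be used.
 * It assumes PARCSortedListEntryCompareFunction takes two PARCObject pointers
 * and returns a <0/0/>0 ordering, and that parcSortedList_Add and
 * parcSortedList_Release are the public insert/release calls; the
 * reverse-order comparator is hypothetical.
 */
#include <parc/algol/parc_SortedList.h>
#include <parc/algol/parc_Buffer.h>
#include <parc/algol/parc_Object.h>

static int
_reverseCompare(const PARCObject *a, const PARCObject *b)
{
    // Invert the objects' natural ordering to sort in descending order.
    return -parcObject_Compare(a, b);
}

static void
_exampleSortedList(void)
{
    PARCSortedList *list = parcSortedList_CreateCompare(_reverseCompare);

    PARCBuffer *a = parcBuffer_WrapCString("a");
    PARCBuffer *b = parcBuffer_WrapCString("b");
    parcSortedList_Add(list, a);
    parcSortedList_Add(list, b);   // with _reverseCompare, "b" sorts before "a"

    parcBuffer_Release(&a);
    parcBuffer_Release(&b);
    parcSortedList_Release(&list);
}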
CCNxManifestHashGroup *
ccnxManifestHashGroup_Create(void)
{
    CCNxManifestHashGroup *section = parcObject_CreateAndClearInstance(CCNxManifestHashGroup);
    if (section != NULL) {
        section->pointers = parcLinkedList_Create();
        section->overallDataDigest = NULL;
        section->dataSize = 0;
        section->entrySize = 0;
        section->blockSize = 0;
        section->treeHeight = 0;
        section->locator = NULL;
    }
    return section;
}
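/*
 * An illustrative lifecycle for the constructor above. The pointer-append
 * call, the CCNxManifestHashGroupPointerType_Data enumerand, and the header
 * path are assumptions about the public libccnx API; the digest contents are
 * dummy data, not a real hash.
 */
#include <ccnx/common/ccnx_ManifestHashGroup.h>
#include <parc/algol/parc_Buffer.h>

static void
_exampleHashGroup(void)
{
    CCNxManifestHashGroup *group = ccnxManifestHashGroup_Create();

    PARCBuffer *digest = parcBuffer_WrapCString("not-a-real-digest");
    ccnxManifestHashGroup_AppendPointer(group, CCNxManifestHashGroupPointerType_Data, digest);

    parcBuffer_Release(&digest);
    ccnxManifestHashGroup_Release(&group);
}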
static void
_stressTestNext(PARCSecureRandom *rng)
{
    PARCLinkedList *seen = parcLinkedList_Create();
    size_t duplicates = 0;
    for (size_t i = 0; i < NUM_TESTS; i++) {
        uint32_t next = parcSecureRandom_Next(rng);

        // Wrap the 32-bit value in a buffer and flip it for reading so it
        // can be compared against the values seen so far.
        PARCBuffer *buffer = parcBuffer_Allocate(sizeof(next));
        parcBuffer_Flip(parcBuffer_PutUint32(buffer, next));

        if (parcLinkedList_Contains(seen, buffer)) {
            duplicates++;
        } else {
            parcLinkedList_Append(seen, buffer);
        }
        parcBuffer_Release(&buffer);
    }

    assertFalse(duplicates > (NUM_TESTS * EPSILON),
                "The RNG failed to generate random values: saw %zu duplicates", duplicates);

    parcLinkedList_Release(&seen);
}
static void
_stressTestNextBytes(PARCSecureRandom *rng)
{
    PARCLinkedList *seen = parcLinkedList_Create();
    size_t duplicates = 0;
    for (size_t i = 0; i < NUM_TESTS; i++) {
        PARCBuffer *buffer = parcBuffer_Allocate(32);

        int numBytes = parcSecureRandom_NextBytes(rng, buffer);
        assertTrue(numBytes == 32,
                   "Expected 32 bytes from the RNG, got %d", numBytes);

        if (parcLinkedList_Contains(seen, buffer)) {
            duplicates++;
        } else {
            parcLinkedList_Append(seen, buffer);
        }
        parcBuffer_Release(&buffer);
    }

    assertFalse(duplicates > (NUM_TESTS * EPSILON),
                "The RNG failed to generate random values: saw %zu duplicates", duplicates);

    parcLinkedList_Release(&seen);
}
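/*
 * Both stress tests above rely on NUM_TESTS and EPSILON, which are not shown
 * in this excerpt. Plausible definitions, for illustration only: a large
 * iteration count and a small tolerated duplicate fraction. The actual values
 * in the test suite may differ.
 */
#define NUM_TESTS 1000
#define EPSILON 0.05   // fail the test if more than 5% of draws are duplicates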