Example #1
void
parcObjectTesting_AssertHashCode(const PARCObject *x, const void *y)
{
    assertFalse(x == y, "The parameters x and y cannot be the same value.");
    assertTrue(parcObject_Equals(x, y), "The parameters x and y must be equal");

    PARCHashCode xCode = parcObject_HashCode(x);
    PARCHashCode yCode = parcObject_HashCode(y);

    assertTrue(xCode == yCode, "Expected the HashCode of two equal objects to be equal.");
}
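A minimal usage sketch (not from the library source), assuming libparc's PARCBuffer, where two buffers wrapping identical C strings are equal but distinct instances:

#include <parc/algol/parc_Buffer.h>
#include <parc/testing/parc_ObjectTesting.h>

static void
_exampleAssertHashCode(void)
{
    // Equal contents, distinct pointers: satisfies both preconditions above.
    PARCBuffer *x = parcBuffer_WrapCString("hello");
    PARCBuffer *y = parcBuffer_WrapCString("hello");

    parcObjectTesting_AssertHashCode(x, y);

    parcBuffer_Release(&x);
    parcBuffer_Release(&y);
}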
Example #2
void
parcObjectTesting_AssertEquals(const PARCObject *x, const void *y, const void *z, ...)
{
    va_list ap;
    va_start(ap, z);

    _parcObjectTesting_AssertEquals((bool (*)(const void *, const void *))parcObject_Equals, x, y, z, ap);

    assertTrue(parcObject_HashCode(x) == parcObject_HashCode(y),
               "HashCode of x and y must be equal");
    assertTrue(parcObject_HashCode(x) == parcObject_HashCode(z),
               "HashCode of x and z must be equal");

    va_end(ap);
}
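A sketch of a call site, assuming (per the libparc testing contract) that the trailing varargs are instances unequal to x and that the list is NULL-terminated:

static void
_exampleAssertEquals(void)
{
    // Three distinct-but-equal instances, plus one unequal instance.
    PARCBuffer *x = parcBuffer_WrapCString("a");
    PARCBuffer *y = parcBuffer_WrapCString("a");
    PARCBuffer *z = parcBuffer_WrapCString("a");
    PARCBuffer *u = parcBuffer_WrapCString("b");

    parcObjectTesting_AssertEquals(x, y, z, u, NULL);

    parcBuffer_Release(&x);
    parcBuffer_Release(&y);
    parcBuffer_Release(&z);
    parcBuffer_Release(&u);
}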
Example #3
PARCHashMap *
parcHashMap_Put(PARCHashMap *hashMap, const PARCObject *key, const PARCObject *value)
{

    // When the capacity is doubled, the load factor drops from .75 (3/4) to
    // .375 (3/8); if we halve the capacity when the load factor reaches
    // .25 (1/4), it becomes .5 (1/2).
    double loadFactor = (double) hashMap->size / (double) hashMap->capacity;
    if (loadFactor >= hashMap->maxLoadFactor) {
        _parcHashMap_Resize(hashMap, hashMap->capacity * 2);
    }

    _PARCHashMapEntry *entry = _parcHashMap_GetEntry(hashMap, key);

    if (entry != NULL) {
        if (entry->value != value) {
            parcObject_Release(&entry->value);
            entry->value = parcObject_Acquire(value);
        }
    } else {
        entry = _parcHashMapEntry_Create(key, value);

        PARCHashCode keyHash = parcObject_HashCode(key);
        int bucket = keyHash % hashMap->capacity;

        if (hashMap->buckets[bucket] == NULL) {
            hashMap->buckets[bucket] = parcLinkedList_Create();
        }
        parcLinkedList_Append(hashMap->buckets[bucket], entry);
        hashMap->size++;
        _parcHashMapEntry_Release(&entry);
    }

    return hashMap;
}
Example #4
bool
parcHashMap_Remove(PARCHashMap *hashMap, const PARCObject *key)
{
    PARCHashCode keyHash = parcObject_HashCode(key);

    int bucket = keyHash % hashMap->capacity;

    bool result = false;

    if (hashMap->buckets[bucket] != NULL) {
        PARCIterator *iterator = parcLinkedList_CreateIterator(hashMap->buckets[bucket]);

        while (parcIterator_HasNext(iterator)) {
            _PARCHashMapEntry *entry = parcIterator_Next(iterator);
            if (parcObject_Equals(key, entry->key)) {
                parcIterator_Remove(iterator);
                hashMap->size--;
                result = true;
                break;
            }
        }
        parcIterator_Release(&iterator);
    }

    // When the capacity is doubled, the load factor drops from .75 (3/4) to
    // .375 (3/8); if we halve the capacity when the load factor reaches
    // .25 (1/4), it becomes .5 (1/2).
    double loadFactor = (double) hashMap->size / (double) hashMap->capacity;
    if (loadFactor <= hashMap->minLoadFactor) {
        _parcHashMap_Resize(hashMap, hashMap->capacity / 2);
    }

    return result;
}
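Taken together, Examples #3 and #4 support the usual put/get/remove round trip. A minimal sketch, assuming the public libparc API (parcHashMap_Create, parcHashMap_Get, parcHashMap_Release) alongside the functions shown above:

#include <stdbool.h>
#include <parc/algol/parc_Buffer.h>
#include <parc/algol/parc_HashMap.h>

static void
_examplePutGetRemove(void)
{
    PARCHashMap *map = parcHashMap_Create();
    PARCBuffer *key = parcBuffer_WrapCString("key");
    PARCBuffer *value = parcBuffer_WrapCString("value");

    // Put hashes the key into a bucket; Get scans that bucket for an equal key.
    parcHashMap_Put(map, key, value);
    const PARCObject *found = parcHashMap_Get(map, key);   // expected: value
    (void) found;

    // Remove drops the map's reference to the entry and may shrink the table.
    bool removed = parcHashMap_Remove(map, key);            // expected: true
    (void) removed;

    parcBuffer_Release(&key);
    parcBuffer_Release(&value);
    parcHashMap_Release(&map);
}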
Example #5
static void
_parcHashMap_Resize(PARCHashMap *hashMap, size_t newCapacity)
{
    if (newCapacity < hashMap->minCapacity) {
        return;
    }

    PARCLinkedList **newBuckets = parcMemory_AllocateAndClear(newCapacity * sizeof(PARCLinkedList*));

    for (unsigned int i = 0; i < hashMap->capacity; i++) {
        if (hashMap->buckets[i] != NULL) {
            if (!parcLinkedList_IsEmpty(hashMap->buckets[i])) {
                PARCIterator *elementIt = parcLinkedList_CreateIterator(hashMap->buckets[i]);
                while (parcIterator_HasNext(elementIt)) {
                    _PARCHashMapEntry *entry = parcIterator_Next(elementIt);
                    PARCHashCode keyHash = parcObject_HashCode(entry->key);
                    int newBucket = keyHash % newCapacity;
                    if (newBuckets[newBucket] == NULL) {
                        newBuckets[newBucket] = parcLinkedList_Create();
                    }
                    parcLinkedList_Append(newBuckets[newBucket], entry);
                }
                parcIterator_Release(&elementIt);
            }
            parcLinkedList_Release(&hashMap->buckets[i]);
        }
    }
    PARCLinkedList **cleanupBuckets = hashMap->buckets;
    hashMap->buckets = newBuckets;
    hashMap->capacity = newCapacity;

    parcMemory_Deallocate(&cleanupBuckets);
}
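To make the thresholds in Examples #3 through #5 concrete (the numbers are illustrative): with a capacity of 16, a maxLoadFactor of .75, and a minLoadFactor of .25, the table doubles to 32 when the 12th entry is added (12/16 = .75, then 12/32 = .375) and halves to 8 when removal leaves 4 entries (4/16 = .25, then 4/8 = .5). Because each resize lands the load factor strictly between the two thresholds, alternating inserts and removes cannot trigger back-to-back resizes in opposite directions.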
Example #6
LONGBOW_TEST_CASE(Loca, _createHashableKey_Name)
{
    CCNxName *name1 = ccnxName_CreateFromURI("lci:/name/1");
    CCNxName *name2 = ccnxName_CreateFromURI("lci:/name/2");

    PARCObject *keyObj1 = _createHashableKey(name1, NULL, NULL);
    PARCObject *keyObj2 = _createHashableKey(name2, NULL, NULL);

    assertNotNull(keyObj1, "Expected non-null key object");
    assertNotNull(keyObj2, "Expected non-null key object");
    assertFalse(parcObject_HashCode(keyObj1) == 0, "Expected non zero hashcode");
    assertFalse(parcObject_HashCode(keyObj2) == 0, "Expected non zero hashcode");
    assertFalse(parcObject_HashCode(keyObj1) == parcObject_HashCode(keyObj2), "Expected different hashcodes");

    parcObject_Release((PARCObject **) &keyObj1);
    parcObject_Release((PARCObject **) &keyObj2);
    ccnxName_Release(&name1);
    ccnxName_Release(&name2);
}
Example #7
LONGBOW_TEST_CASE(Loca, _createHashableKey_NameAndObjectHash)
{
    // Same name, with and without a content object hash.

    CCNxName *name1 = ccnxName_CreateFromURI("lci:/name/1");
    CCNxName *name2 = ccnxName_CreateFromURI("lci:/name/2");

    PARCBuffer *objHash1 = parcBuffer_WrapCString("hash 1");
    PARCBuffer *objHash2 = parcBuffer_WrapCString("hash 2");

    PARCObject *keyObj1 = _createHashableKey(name1, NULL, objHash1);
    PARCObject *keyObj2 = _createHashableKey(name1, NULL, NULL);

    assertFalse(parcObject_HashCode(keyObj1) == 0, "Expected non zero hashcode");
    assertFalse(parcObject_HashCode(keyObj2) == 0, "Expected non zero hashcode");
    assertFalse(parcObject_HashCode(keyObj1) == parcObject_HashCode(keyObj2), "Expected different hashcodes");

    parcObject_Release((PARCObject **) &keyObj1);
    parcObject_Release((PARCObject **) &keyObj2);

    // Different object hashes.

    keyObj1 = _createHashableKey(name1, NULL, objHash1);
    keyObj2 = _createHashableKey(name1, NULL, objHash2);

    assertFalse(parcObject_HashCode(keyObj1) == 0, "Expected non zero hashcode");
    assertFalse(parcObject_HashCode(keyObj2) == 0, "Expected non zero hashcode");
    assertFalse(parcObject_HashCode(keyObj1) == parcObject_HashCode(keyObj2), "Expected different hashcodes");

    parcObject_Release((PARCObject **) &keyObj1);
    parcObject_Release((PARCObject **) &keyObj2);
    parcBuffer_Release(&objHash1);
    parcBuffer_Release(&objHash2);

    // Cleanup.

    ccnxName_Release(&name1);
    ccnxName_Release(&name2);
}
Example #8
static _PARCHashMapEntry *
_parcHashMap_GetEntry(const PARCHashMap *hashMap, const PARCObject *key)
{
    PARCHashCode keyHash = parcObject_HashCode(key);

    int bucket = keyHash % hashMap->capacity;

    _PARCHashMapEntry *result = NULL;

    if (hashMap->buckets[bucket] != NULL) {
        PARCIterator *iterator = parcLinkedList_CreateIterator(hashMap->buckets[bucket]);

        while (parcIterator_HasNext(iterator)) {
            _PARCHashMapEntry *entry = parcIterator_Next(iterator);
            if (parcObject_Equals(key, entry->key)) {
                result = entry;
                break;
            }
        }
        parcIterator_Release(&iterator);
    }

    return result;
}
Example #9
static PARCHashCode
_parcHashMapEntry_HashCode(const _PARCHashMapEntry *entry)
{
    return parcObject_HashCode(entry->key);
}
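Note that the entry's hash code simply delegates to the key's, so Put (Example #3), Remove (Example #4), resizing (Example #5), and lookup (Example #8) all derive the bucket index from the same parcObject_HashCode(key) value.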