LONGBOW_TEST_CASE(Global, parc_Chunker_ReverseIterator_BufferSmall)
{
    PARCBuffer *buffer = parcBuffer_Allocate(16);

    // Fill the whole 16-byte buffer with the special 0xFF marker
    for (int i = 0; i < 16; i++) {
        parcBuffer_PutUint8(buffer, 0xFF);
    }
    parcBuffer_Flip(buffer);

    PARCBufferChunker *chunker = parcBufferChunker_Create(buffer, 32); // each chunk is 32 bytes
    assertNotNull(chunker, "Expected non-NULL Chunker");

    PARCIterator *itr = parcBufferChunker_ReverseIterator(chunker);
    size_t count = 0;
    while (parcIterator_HasNext(itr)) {
        PARCBuffer *payload = (PARCBuffer *) parcIterator_Next(itr);

        uint8_t *contents = parcBuffer_Overlay(payload, 0);
        for (size_t i = 0; i < 16; i++) {
            assertTrue(contents[i] == 0xFF, "Expected %zu at index %zu, got %d", (size_t) 0xFF, i, contents[i]);
        }
        count++;

        parcBuffer_Release(&payload);
    }
    assertTrue(count == 1, "Expected to iterate over 1 content object from the chunker, but got %zu", count);
    parcIterator_Release(&itr);

    parcBufferChunker_Release(&chunker);
    parcBuffer_Release(&buffer);
}
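The iteration pattern above is not test-specific. Below is a minimal sketch (includes omitted, as in the listings here) that counts the chunks a PARCBufferChunker yields for an arbitrary buffer, using only calls that already appear in these examples; _countChunks is a hypothetical helper name. A 16-byte buffer chunked at 32 bytes, as above, yields exactly one short chunk.

// Hypothetical helper: count the chunks produced for `buffer` at `chunkSize`.
static size_t
_countChunks(PARCBuffer *buffer, size_t chunkSize)
{
    PARCBufferChunker *chunker = parcBufferChunker_Create(buffer, chunkSize);

    PARCIterator *itr = parcBufferChunker_ForwardIterator(chunker);
    size_t count = 0;
    while (parcIterator_HasNext(itr)) {
        PARCBuffer *payload = (PARCBuffer *) parcIterator_Next(itr);
        count++;
        parcBuffer_Release(&payload);
    }
    parcIterator_Release(&itr);

    parcBufferChunker_Release(&chunker);
    return count;
}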
Example #2
bool
parcHashMap_Remove(PARCHashMap *hashMap, const PARCObject *key)
{
    PARCHashCode keyHash = parcObject_HashCode(key);

    int bucket = keyHash % hashMap->capacity;

    bool result = false;

    if (hashMap->buckets[bucket] != NULL) {
        PARCIterator *iterator = parcLinkedList_CreateIterator(hashMap->buckets[bucket]);

        while (parcIterator_HasNext(iterator)) {
            _PARCHashMapEntry *entry = parcIterator_Next(iterator);
            if (parcObject_Equals(key, entry->key)) {
                parcIterator_Remove(iterator);
                hashMap->size--;
                result = true;
                break;
            }
        }
        parcIterator_Release(&iterator);
    }

    // When expanded by 2, the load factor goes from .75 (3/4) to .375 (3/8);
    // if we compress by 2 when the load factor is .25 (1/4), the load
    // factor becomes .5 (1/2).
    double loadFactor = (double)hashMap->size/(double)hashMap->capacity;
    if (loadFactor <= (hashMap->minLoadFactor)) {
        _parcHashMap_Resize(hashMap, hashMap->capacity / 2);
    }

    return result;
}
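A concrete reading of the load-factor comment above, with illustrative numbers (the real minLoadFactor is whatever the map was configured with):

// Hypothetical shrink decision:
//   capacity = 64, size = 16      -> loadFactor = 16/64 = 0.25
//   0.25 <= minLoadFactor (0.25)  -> resize to capacity/2 = 32
//   new loadFactor = 16/32 = 0.5  -> the .25 -> .5 transition the comment describes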
LONGBOW_TEST_CASE(Global, parc_Chunker_ReverseIterator_Buffer)
{
    PARCBuffer *buffer = parcBuffer_Allocate(1024);

    for (size_t i = 0; i < 32; i++) {
        for (size_t j = 0; j < 32; j++) {
            parcBuffer_PutUint8(buffer, i);
        }
    }
    parcBuffer_Flip(buffer);

    PARCBufferChunker *chunker = parcBufferChunker_Create(buffer, 32); // each chunk is 32 bytes
    assertNotNull(chunker, "Expected non-NULL Chunker");

    PARCIterator *itr = parcBufferChunker_ReverseIterator(chunker);
    size_t count = 0;
    while (parcIterator_HasNext(itr)) {
        PARCBuffer *payload = (PARCBuffer *) parcIterator_Next(itr);

        uint8_t *contents = parcBuffer_Overlay(payload, 0);
        for (size_t i = 0; i < 32; i++) {
            assertTrue(contents[i] == (31 - count), "Expected %zu at index %zu, got %d", (31 - count), i, contents[i]);
        }
        count++;

        parcBuffer_Release(&payload);
    }
    assertTrue(count == 32, "Expected to iterate over 32 content objects from the chunker, but got %zu", count);
    parcIterator_Release(&itr);

    parcBufferChunker_Release(&chunker);
    parcBuffer_Release(&buffer);
}
Example #4
static void
_parcHashMap_Resize(PARCHashMap *hashMap, size_t newCapacity)
{
    if (newCapacity < hashMap->minCapacity) {
        return;
    }

    PARCLinkedList **newBuckets = parcMemory_AllocateAndClear(newCapacity * sizeof(PARCLinkedList*));

    for (unsigned int i = 0; i < hashMap->capacity; i++) {
        if (hashMap->buckets[i] != NULL) {
            if (!parcLinkedList_IsEmpty(hashMap->buckets[i])) {
                PARCIterator *elementIt = parcLinkedList_CreateIterator(hashMap->buckets[i]);
                while (parcIterator_HasNext(elementIt)) {
                    _PARCHashMapEntry *entry = parcIterator_Next(elementIt);
                    PARCHashCode keyHash = parcObject_HashCode(entry->key);
                    int newBucket = keyHash % newCapacity;
                    if (newBuckets[newBucket] == NULL) {
                        newBuckets[newBucket] = parcLinkedList_Create();
                    }
                    parcLinkedList_Append(newBuckets[newBucket], entry);
                }
                parcIterator_Release(&elementIt);
            }
            parcLinkedList_Release(&hashMap->buckets[i]);
        }
    }
    PARCLinkedList **cleanupBuckets = hashMap->buckets;
    hashMap->buckets = newBuckets;
    hashMap->capacity = newCapacity;

    parcMemory_Deallocate(&cleanupBuckets);
}
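The rehash loop above exists because a key's bucket depends on the current capacity (bucket = keyHash % capacity); a worked example with a hypothetical hash value:

// Bucket placement before and after doubling (keyHash value is hypothetical):
//   keyHash = 93, capacity = 8   -> bucket = 93 % 8  = 5
//   keyHash = 93, capacity = 16  -> bucket = 93 % 16 = 13
// Every surviving entry therefore has to be re-appended under the new modulus.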
Example #5
PARCJSON *
parcHashMap_ToJSON(const PARCHashMap *hashMap)
{
    parcHashMap_OptionalAssertValid(hashMap);

    PARCJSON *result = parcJSON_Create();

    PARCIterator *iterator = parcHashMap_CreateKeyIterator((PARCHashMap *) hashMap);

    while (parcIterator_HasNext(iterator)) {
        PARCObject *keyObject = parcIterator_Next(iterator);
        const PARCObject *valueObject = parcHashMap_Get(hashMap, keyObject);
        char *key = parcObject_ToString(keyObject);
        PARCJSON *value = parcObject_ToJSON(valueObject);

        parcJSON_AddObject(result, key, value);

        parcMemory_Deallocate(&key);
        parcJSON_Release(&value);
    }

    parcIterator_Release(&iterator);

    return result;
}
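A minimal usage sketch for the conversion above. The constructors and helpers it relies on (parcHashMap_Create, parcHashMap_Put, parcBuffer_WrapCString, parcJSON_ToString, parcHashMap_Release) do not appear in this listing and are assumed from the broader libparc API, so treat it as illustrative rather than verbatim.

// Sketch: build a small map and print it as JSON (assumed constructors noted above).
static void
_example_HashMapToJSON(void)
{
    PARCHashMap *map = parcHashMap_Create();

    PARCBuffer *key = parcBuffer_WrapCString("name");
    PARCBuffer *value = parcBuffer_WrapCString("parc");
    parcHashMap_Put(map, key, value);

    PARCJSON *json = parcHashMap_ToJSON(map);
    char *string = parcJSON_ToString(json);
    printf("%s\n", string);

    parcMemory_Deallocate(&string);
    parcJSON_Release(&json);
    parcBuffer_Release(&key);
    parcBuffer_Release(&value);
    parcHashMap_Release(&map);
}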
Example #6
static void
dump(PARCSortedList *list)
{
    PARCIterator *iterator = parcSortedList_CreateIterator(list);
    while (parcIterator_HasNext(iterator)) {
        PARCBuffer *buffer = parcIterator_Next(iterator);
        parcBuffer_Display(buffer, 0);
    }

    parcIterator_Release(&iterator);
}
LONGBOW_TEST_CASE(Global, parc_Chunker_ForwardIterator_BufferPartial)
{
    // Allocate something that's not divisible by the chunk size
    PARCBuffer *buffer = parcBuffer_Allocate(1030);

    for (size_t i = 0; i < 32; i++) {
        for (size_t j = 0; j < 32; j++) {
            parcBuffer_PutUint8(buffer, i);
        }
    }

    // Special 0xFF to mark the end...
    for (int i = 0; i < 6; i++) {
        parcBuffer_PutUint8(buffer, 0xFF);
    }

    parcBuffer_Flip(buffer);

    PARCBufferChunker *chunker = parcBufferChunker_Create(buffer, 32); // each chunk is 32 bytes
    assertNotNull(chunker, "Expected non-NULL Chunker");

    PARCIterator *itr = parcBufferChunker_ForwardIterator(chunker);
    size_t count = 0;
    while (parcIterator_HasNext(itr)) {
        PARCBuffer *payload = (PARCBuffer *) parcIterator_Next(itr);

        uint8_t *contents = parcBuffer_Overlay(payload, 0);
        if (count < 32) {
            for (size_t i = 0; i < 32; i++) {
                assertTrue(contents[i] == count, "Expected %zu at index %zu, got %d", count, i, contents[i]);
            }
        } else {
            for (size_t i = 0; i < 6; i++) {
                assertTrue(contents[i] == 0xFF, "Expected %zu at index %zu, got %d", (size_t) 0xFF, i, contents[i]);
            }
        }
        count++;

        parcBuffer_Release(&payload);
    }
    assertTrue(count == 33, "Expected to iterate over 33 content objects from the chunker, but got %zu", count);
    parcIterator_Release(&itr);

    parcBufferChunker_Release(&chunker);
    parcBuffer_Release(&buffer);
}
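The expected count of 33 follows directly from the sizes involved:

// 1030 bytes at 32 bytes per chunk:
//   32 full chunks * 32 bytes = 1024 bytes
//   1030 - 1024 = 6 trailing bytes -> one final partial chunk
//   total = 32 + 1 = 33 chunks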
Example #8
PARCBufferComposer *
parcHashMap_BuildString(const PARCHashMap *hashMap, PARCBufferComposer *composer)
{
    PARCIterator *iterator = parcHashMap_CreateKeyIterator((PARCHashMap *) hashMap);

    while (parcIterator_HasNext(iterator)) {
        PARCObject *keyObject = parcIterator_Next(iterator);
        const PARCObject *valueObject = parcHashMap_Get(hashMap, keyObject);
        char *key = parcObject_ToString(keyObject);
        char *value = parcObject_ToString(valueObject);
        parcBufferComposer_Format(composer, "%s -> %s\n", key, value);
        parcMemory_Deallocate(&key);
        parcMemory_Deallocate(&value);
    }

    parcIterator_Release(&iterator);

    return composer;
}
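A usage sketch for the composer-based formatter above, given an already-populated map. The composer helpers it uses (parcBufferComposer_Create, parcBufferComposer_ProduceBuffer, parcBuffer_ToString, parcBufferComposer_Release) are assumed from the broader libparc API and are not shown in this listing.

// Sketch: render a populated map as "key -> value" lines (assumed composer helpers).
static void
_example_HashMapBuildString(const PARCHashMap *map)
{
    PARCBufferComposer *composer = parcBufferComposer_Create();
    parcHashMap_BuildString(map, composer);

    PARCBuffer *buffer = parcBufferComposer_ProduceBuffer(composer);
    char *string = parcBuffer_ToString(buffer);
    printf("%s", string);

    parcMemory_Deallocate(&string);
    parcBuffer_Release(&buffer);
    parcBufferComposer_Release(&composer);
}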
Example #9
LONGBOW_TEST_CASE(Global, ccnx_Chunker_ReverseIterator)
{
    int n = 10;

    _DummyChunker *dummy = _dummy_Create(n);
    CCNxChunker *chunker = ccnxChunker_Create(dummy, _MockChunker);
    PARCIterator *itr = ccnxChunker_ReverseIterator(chunker);

    int targetSum = (n * (n + 1)) / 2;
    int sum = 0;
    while (parcIterator_HasNext(itr)) {
        int *val = parcIterator_Next(itr);
        sum += *val;
    }
    assertTrue(targetSum == sum, "Expected the reverse iterator to visit every chunk: expected sum %d, got %d", targetSum, sum);

    parcIterator_Release(&itr);
    ccnxChunker_Release(&chunker);
    _dummy_Release(&dummy);
}
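The check works because summation is order-independent: a correct reverse iterator must visit the same values a forward walk would (presumably 1 through n for the dummy chunker), so the total must come out to n(n+1)/2.

// n = 10 -> targetSum = 10 * 11 / 2 = 55; any visiting order of 1..10 sums to 55.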
Example #10
bool
parcSortedList_Remove(PARCSortedList *list, const PARCObject *object)
{
    bool result = false;

    PARCIterator *iterator = parcSortedList_CreateIterator(list);

    while (parcIterator_HasNext(iterator)) {
        PARCObject *o = parcIterator_Next(iterator);
        if (parcObject_Equals(object, o)) {
            parcIterator_Remove(iterator);
            result = true;
            break;
        }
    }

    parcIterator_Release(&iterator);

    return result;
}
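A usage sketch for the removal above, paired with the dump() helper shown earlier. parcSortedList_Create, parcSortedList_Add, and parcSortedList_Release are assumed from the broader libparc API and are not shown in this listing.

// Sketch: populate a sorted list, remove one element, and dump what remains
// (the create/add/release calls are assumed, not shown above).
static void
_example_SortedListRemove(void)
{
    PARCSortedList *list = parcSortedList_Create();

    PARCBuffer *a = parcBuffer_WrapCString("alpha");
    PARCBuffer *b = parcBuffer_WrapCString("beta");
    parcSortedList_Add(list, a);
    parcSortedList_Add(list, b);

    bool removed = parcSortedList_Remove(list, a);   // true: "alpha" was present
    if (removed) {
        dump(list);                                  // displays only "beta"
    }

    parcBuffer_Release(&a);
    parcBuffer_Release(&b);
    parcSortedList_Release(&list);
}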
Example #11
void
parcHashMap_Display(const PARCHashMap *hashMap, int indentation)
{
    parcDisplayIndented_PrintLine(indentation, "PARCHashMap@%p {", hashMap);

    PARCIterator *iterator = parcHashMap_CreateKeyIterator((PARCHashMap *) hashMap);

    while (parcIterator_HasNext(iterator)) {
        PARCObject *keyObject = parcIterator_Next(iterator);
        const PARCObject *valueObject = parcHashMap_Get(hashMap, keyObject);
        char *key = parcObject_ToString(keyObject);
        char *value = parcObject_ToString(valueObject);
        parcDisplayIndented_PrintLine(indentation + 1, "%s -> %s", key, value);
        parcMemory_Deallocate(&key);
        parcMemory_Deallocate(&value);
    }
    parcIterator_Release(&iterator);

    parcDisplayIndented_PrintLine(indentation, "}");
}
Example #12
static _PARCHashMapEntry *
_parcHashMap_GetEntry(const PARCHashMap *hashMap, const PARCObject *key)
{
    PARCHashCode keyHash = parcObject_HashCode(key);

    int bucket = keyHash % hashMap->capacity;

    _PARCHashMapEntry *result = NULL;

    if (hashMap->buckets[bucket] != NULL) {
        PARCIterator *iterator = parcLinkedList_CreateIterator(hashMap->buckets[bucket]);

        while (parcIterator_HasNext(iterator)) {
            _PARCHashMapEntry *entry = parcIterator_Next(iterator);
            if (parcObject_Equals(key, entry->key)) {
                result = entry;
                break;
            }
        }
        parcIterator_Release(&iterator);
    }

    return result;
}
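parcHashMap_Get, used in several examples above, can plausibly be layered on this entry lookup. The sketch below assumes the entry struct also carries a value field, which this listing does not show; it is illustrative, not the library's actual implementation.

// Hypothetical wrapper: resolve a key to its stored value via the entry lookup.
// The `value` field is assumed; only `key` appears in this listing.
static const PARCObject *
_parcHashMap_GetSketch(const PARCHashMap *hashMap, const PARCObject *key)
{
    _PARCHashMapEntry *entry = _parcHashMap_GetEntry(hashMap, key);
    return (entry != NULL) ? entry->value : NULL;
}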