LGGOCXXSharedMemoryDescriptor LGGOCXXStoreSegment::getDescriptorForAddress (LGGOCXXAddress address) {
  // Looks up the object stored for `address` in this segment and returns a
  // descriptor over its compressed payload. Returns an empty (NULL-backed)
  // descriptor when the address is not present.
  LGGOCXXSharedMemoryDescriptor retval = LGGOCXXSharedMemoryDescriptor((LGGOCXXMemoryDescriptor *)NULL);
  uint8_t *segmentData = (uint8_t *)descriptor->getData();
  uint64_t *pointerTablePointers = (uint64_t *)(&segmentData[pointerPointerTableOffset]);
  // char *pointerHashTable = (char *)(&segmentData[pointerHashTableOffset]);

  //FIXME make this a binary search
  uint64_t i;
  for (i = 0; i < pointerCount; ++i) {
    if (LGGOCXXAddress(pointerTablePointers[i]) == address) {
      break;
    }
  }

  // Address not in this segment: return the empty descriptor instead of
  // building a hash from one past the end of the pointer hash table, which is
  // what the previous code did when the scan fell through.
  if (i == pointerCount) {
    return retval;
  }

  //FIXME in the future use this with the shared hash infrastructure to save some memory
  LGGOCXXIntrusiveStoreHash hash = LGGOCXXIntrusiveStoreHash(descriptor, pointerHashTableOffset+(i*CC_SHA224_DIGEST_LENGTH));

  //FIXME make this a binary search
  for (i = 0; i < hashCount; ++i) {
    //FIX 32, it should be a constant, not magic
    LGGOCXXIntrusiveStoreHash potentialHash = LGGOCXXIntrusiveStoreHash(descriptor, hashHashTableOffset+(i*HASHTABLE_RECORD_SIZE));
    if (hash == potentialHash) {
      // Each record is a digest followed by a little-endian 64-bit offset into
      // the data table. The offset field sits right after the 28-byte digest
      // (CC_SHA224_DIGEST_LENGTH) and is therefore not 8-byte aligned — read
      // it with memcpy; a cast-and-dereference is undefined behavior on
      // strict-alignment targets.
      uint64_t objectHashOffsetLE;
      memcpy(&objectHashOffsetLE, &segmentData[hashHashTableOffset+(i*HASHTABLE_RECORD_SIZE)+CC_SHA224_DIGEST_LENGTH], sizeof(objectHashOffsetLE));
      uint64_t objectHashOffset = LGGOSwapLittleToHost64(objectHashOffsetLE);
      uint64_t baseOffset = objectHashOffset + hashDataTableOffset;

      // Per-object header: {uncompressed size, compressed size}, then the
      // compressed payload. These fields may be unaligned as well (records are
      // packed back to back), so use memcpy here too.
      uint64_t objectSizeLE;
      uint64_t compressedObjectSizeLE;
      memcpy(&objectSizeLE, &segmentData[baseOffset+0], sizeof(objectSizeLE));
      memcpy(&compressedObjectSizeLE, &segmentData[baseOffset+8], sizeof(compressedObjectSizeLE));
      uint64_t objectSize = LGGOSwapLittleToHost64(objectSizeLE);
      uint64_t compressedObjectSize = LGGOSwapLittleToHost64(compressedObjectSizeLE);

      uint64_t offset = baseOffset + 16;
      retval = LGGOCXXSharedMemoryDescriptor(new LGGOCXXCompressedChildMemoryDescriptor(descriptor, offset, compressedObjectSize, objectSize));
      break;
    }
  }

  return retval;
}
LGGOCXXStringRef::LGGOCXXStringRef(std::string S) : LGGOCXXReference(), charLength(0), lengthCalculated(false) {
  // Builds the backing descriptor for a string. Strings of up to 7 bytes are
  // packed inline into a single tagged 64-bit value; longer strings get a
  // heap buffer of tag byte + raw bytes.
  byteLength = S.size();

  if (byteLength <= 7) {
    // Zero-fill the scratch buffer: the previous code left the unused tail
    // bytes uninitialized but still folded all 8 bytes into the tagged value,
    // so equal strings could produce different tagged descriptors.
    uint8_t buffer[8] = {0};
    // Byte 0: type tag in the low nibble, byte length in the high nibble.
    buffer[0] = kLGGOAddressStringType | (uint8_t)byteLength << 4;
    for (uint32_t idx = 0; idx < byteLength; ++idx) {
      buffer[idx+1] = S[idx];
    }
    // memcpy instead of *((uint64_t *)&buffer[0]) — avoids the
    // strict-aliasing/alignment violation of the cast form.
    uint64_t taggedValue;
    memcpy(&taggedValue, buffer, sizeof(taggedValue));
    stringDescriptor = LGGOCXXSharedMemoryDescriptor(new LGGOCXXTaggedMemoryDescriptor(taggedValue));
  } else {
    // Out-of-line representation: tag byte then the raw string bytes.
    // Ownership of `buffer` presumably passes to the descriptor via the
    // constructor flags — TODO confirm against LGGOCXXMemoryDescriptor.
    uint8_t *buffer = static_cast<uint8_t *>(malloc(byteLength+1));
    // (0xff << 4) truncates to 0xf0 when stored: high nibble 0xF marks
    // "length not encoded in the tag byte". //FIXME check this
    buffer[0] = kLGGOAddressStringType | (uint8_t)(0xff << 4);
    memcpy (&buffer[1], S.data(), byteLength);
    stringDescriptor = LGGOCXXSharedMemoryDescriptor(new LGGOCXXMemoryDescriptor(buffer, byteLength+1, false, false));
  }
}
LGGOCXXSharedMemoryDescriptor LGGOCXXWritableStoreSegment::serializeToMemory(void) {
  // Serializes this segment into one contiguous descriptor. Layout:
  //   [0]  magic                         [8]  version/checksum (0 for now)
  //   [16] pointerCount                  [24] hashCount
  //   pointer table:       pointerCount * sizeof(LGGOCXXAddress)
  //   pointer hash table:  pointerCount * CC_SHA224_DIGEST_LENGTH
  //   hash lookup table:   hashCount * HASHTABLE_RECORD_SIZE (digest + LE64 offset)
  //   object data:         per object {LE64 size, LE64 compressed size, payload}
  std::map<LGGOCXXSharedStoreHash, LGGOCXXSharedMemoryDescriptor> hashMap;
  std::vector<std::tr1::tuple<uint64_t, uint64_t, LGGOCXXSharedMemoryDescriptor> > compressedDescriptors;
  std::map<LGGOCXXAddress,LGGOCXXSharedMemoryDescriptor>::iterator i;
  std::map<LGGOCXXSharedStoreHash, LGGOCXXSharedMemoryDescriptor>::iterator j;
  uint64_t k;

  uint64_t pointerCount = memoryObjects.size();
  uint64_t pointerTablePointersSize = sizeof(LGGOCXXAddress)*pointerCount;
  uint64_t *pointerTablePointers = (uint64_t *)malloc(pointerTablePointersSize);
  unsigned char *pointerTableHashes = (unsigned char *)malloc(CC_SHA224_DIGEST_LENGTH*sizeof(unsigned char)*pointerCount);

  //Walk through the pointer map and build the tables
  //We use two consecutive tables to improve data locality
  for (i = memoryObjects.begin(), k = 0; i != memoryObjects.end(); ++i, ++k) {
    //Build the pointer to hash table
    //FIXME pointer values are written in host byte order while the header
    //fields below are swapped to little-endian; the in-process reader
    //matches, but this is inconsistent for cross-endian interchange.
    pointerTablePointers[k] = i->first.getConcreteAddressValue();

    LGGOCXXSharedMemoryDescriptor object = i->second;
    unsigned char * startPoint = &pointerTableHashes[k*CC_SHA224_DIGEST_LENGTH];
    LGGOCXXSharedStoreHash hash = object->getHash();
    bcopy(hash->getHashPointer(), startPoint, CC_SHA224_DIGEST_LENGTH);

    //Stuff the objects into a hash table while forcing them to compress
    //FIXME test if this is a writable segment via a dynamic cast
    hashMap[hash] = object;
  }

  uint64_t currentOffset = 0;
  uint64_t totalCompressedObjectSize = 0;
  uint64_t hashCount = hashMap.size();
  uint8_t *hashTableHashes = (uint8_t *)malloc(HASHTABLE_RECORD_SIZE*hashCount);

  //Walk through and build the hash lookup table
  //NOTE(review): the old comment claimed SHA224 digests are 24 bytes; they
  //are 28 (CC_SHA224_DIGEST_LENGTH), so each record is a 28-byte digest plus
  //an 8-byte offset field.
  for (j = hashMap.begin(), k = 0; j != hashMap.end(); ++j, ++k) {
    LGGOCXXSharedMemoryDescriptor compressedDescriptor = j->second->compressedDescriptor();
    uint64_t objectSize = j->second->getSize();
    uint64_t compressedObjectSize = compressedDescriptor->getSize();
    totalCompressedObjectSize += (compressedObjectSize + 2*sizeof(uint64_t));

    unsigned char * startPoint = &hashTableHashes[k*HASHTABLE_RECORD_SIZE];
    bcopy(j->first->getHashPointer(), startPoint, CC_SHA224_DIGEST_LENGTH);
    // The offset field sits right after the 28-byte digest and is therefore
    // not 8-byte aligned; write it with memcpy instead of casting to
    // uint64_t* and dereferencing (undefined behavior on strict-alignment
    // targets).
    uint64_t offsetLE = LGGOSwapHostToLittle64(currentOffset);
    memcpy(&startPoint[CC_SHA224_DIGEST_LENGTH], &offsetLE, sizeof(offsetLE));

    currentOffset += (compressedObjectSize + 2*sizeof(uint64_t));
    compressedDescriptors.push_back(std::tr1::make_tuple(objectSize, compressedObjectSize, compressedDescriptor));
  }

  currentOffset = 0;
  uint8_t *hashTableObjects = (uint8_t *)malloc(totalCompressedObjectSize);
  std::vector<std::tr1::tuple<uint64_t, uint64_t, LGGOCXXSharedMemoryDescriptor> >::iterator l;

  //Walk through and copy the compressed descriptors into contiguous memory
  for (l = compressedDescriptors.begin(); l != compressedDescriptors.end(); ++l) {
    std::tr1::tuple<uint64_t, uint64_t, LGGOCXXSharedMemoryDescriptor> descriptorTuple = *l;
    uint64_t objectSize = std::tr1::get<0>(descriptorTuple);
    uint64_t compressedObjectSize = std::tr1::get<1>(descriptorTuple);
    LGGOCXXSharedMemoryDescriptor descriptor = std::tr1::get<2>(descriptorTuple);

    // Bound the whole record, header included; the previous assert omitted
    // the 2*sizeof(uint64_t) header and was weaker than intended.
    assert(currentOffset + compressedObjectSize + 2*sizeof(uint64_t) <= totalCompressedObjectSize);

    uint8_t * startPoint = &hashTableObjects[currentOffset];
    // startPoint is only as aligned as the preceding packed payloads leave
    // it, so the two header fields are written with memcpy as well.
    uint64_t sizeLE = LGGOSwapHostToLittle64(objectSize);
    uint64_t compressedSizeLE = LGGOSwapHostToLittle64(compressedObjectSize);
    memcpy(&startPoint[0], &sizeLE, sizeof(sizeLE));
    memcpy(&startPoint[8], &compressedSizeLE, sizeof(compressedSizeLE));
    bcopy(descriptor->getData(), &startPoint[16], compressedObjectSize);

    currentOffset = currentOffset + compressedObjectSize + 2*sizeof(uint64_t);
  }

  //Make one large segment to lay it all out
  uint64_t totalSize = (4*sizeof(uint64_t)) + pointerTablePointersSize + (CC_SHA224_DIGEST_LENGTH*sizeof(unsigned char)*pointerCount) + (HASHTABLE_RECORD_SIZE*hashCount) + totalCompressedObjectSize;
  // Allocate exactly totalSize: the previous code allocated totalSize+1 but
  // handed totalSize to the descriptor, so the extra byte was dead weight.
  uint8_t *segmentData = (uint8_t *)malloc(totalSize);

  //Write Magic
  //FIXME we need to cryptographically secure the pointer table here
  // segmentData comes straight from malloc, so the header fields below are
  // suitably aligned for direct 64-bit stores.
  *((uint64_t *)(&segmentData[0])) = LGGOSwapHostToLittle64(SEGMENT_MAGIC);
  *((uint64_t *)(&segmentData[8])) = LGGOSwapHostToLittle64(0x0); //version and checksum will go here
  *((uint64_t *)(&segmentData[16])) = LGGOSwapHostToLittle64(pointerCount);
  *((uint64_t *)(&segmentData[24])) = LGGOSwapHostToLittle64(hashCount);
  currentOffset = 32;

  bcopy(pointerTablePointers, &segmentData[currentOffset], (sizeof(LGGOCXXAddress)*pointerCount));
  currentOffset = currentOffset + (sizeof(LGGOCXXAddress)*pointerCount);
  free(pointerTablePointers);

  bcopy(pointerTableHashes, &segmentData[currentOffset], (CC_SHA224_DIGEST_LENGTH*sizeof(unsigned char)*pointerCount));
  currentOffset = currentOffset + (CC_SHA224_DIGEST_LENGTH*sizeof(unsigned char)*pointerCount);
  free(pointerTableHashes);

  bcopy(hashTableHashes, &segmentData[currentOffset], HASHTABLE_RECORD_SIZE*hashCount);
  currentOffset = currentOffset + (HASHTABLE_RECORD_SIZE*hashCount);
  free(hashTableHashes);

  bcopy(hashTableObjects, &segmentData[currentOffset], totalCompressedObjectSize);
  free(hashTableObjects);

  // The final flag presumably transfers ownership of segmentData to the
  // descriptor — TODO confirm against LGGOCXXMemoryDescriptor's constructor.
  return LGGOCXXSharedMemoryDescriptor(new LGGOCXXMemoryDescriptor(segmentData, totalSize, false, true));
}