void generateStats(struct Stats& stats) { stats.uninhabitable += getPadding(); stats.totalMetaData += bitsPerPage-getPadding()-getDataBits()*getMaxCount(); stats.inhabitedMetaData += (getSizeBits()+architectureSize)*header.count; stats.totalPayload += getDataBits()*getMaxCount(); stats.inhabitedPayload += getDataBits()*header.count; }
// Reset the page for the given blob type: no slots inhabited, and every slot
// chained into the free list (slot i links to slot i+1 via its symbol field,
// with the head at index 0).
void init(NativeNaturalType type) {
    header.type = type;
    header.count = 0;
    header.freeIndex = 0;
    const NativeNaturalType slotCount = getMaxCount();
    for(NativeNaturalType slot = 0; slot < slotCount; ++slot) {
        setSize(slot, 0);
        setSymbol(slot, slot+1);
    }
}
// Pop the head of the free list and claim it for the given symbol.
// size must be > 0 and the page must not already be full (asserted).
// When this allocation fills the page, the page is moved from the per-type
// free-bucket set into the full-bucket set.
// Returns the claimed slot index.
NativeNaturalType allocateIndex(NativeNaturalType size, Symbol symbol, PageRefType pageRef) {
    assert(size > 0 && header.count < getMaxCount());
    ++header.count;
    NativeNaturalType index = header.freeIndex;
    // Free list is threaded through the symbol field of free slots.
    header.freeIndex = getSymbol(header.freeIndex);
    setSize(index, size);
    setSymbol(index, symbol);
    if(isFull()) {
        // BUG FIX: the bucket moves used to live inside assert(...), so any
        // build that compiles asserts out (e.g. NDEBUG with the standard
        // macro) would silently skip the bookkeeping. Perform the calls
        // unconditionally and only assert on their results.
        bool inserted = superPage->fullBlobBuckets.insert(pageRef);
        assert(inserted);
        bool erased = superPage->freeBlobBuckets[header.type].erase<Key>(pageRef);
        assert(erased);
        (void)inserted;
        (void)erased;
    }
    return index;
}
// True once every slot on this page is inhabited.
bool isFull() const {
    return getMaxCount() == header.count;
}
// Bits wasted between the end of the size region and the start of the data
// region — space too small to hold another slot.
NativeNaturalType getPadding() const {
    const NativeNaturalType sizeRegionEnd = getSizeOffset()+getSizeBits()*getMaxCount();
    return getDataOffset()-sizeRegionEnd;
}
// Bit offset of the symbol region: one architecture-sized word per slot,
// packed at the very end of the page.
NativeNaturalType getSymbolOffset() const {
    return bitsPerPage-getMaxCount()*architectureSize;
}
// Bit offset of the data (payload) region, which sits immediately before the
// symbol region.
NativeNaturalType getDataOffset() const {
    return getSymbolOffset()-getMaxCount()*getDataBits();
}
// paste payload into existing .json int pasteJSON(json_t *payload, const char *clientName) { // Extract data from payload const char * name = json_string_value(json_object_get(payload, "name")); int type = json_integer_value(json_object_get(payload, "type")); // Type 2: CSV if (type == 2) { FILE *fp; char* file = composeFileName(clientName, name, "csv"); const char * output = json_string_value(json_object_get(payload, "payload")); fp = fopen(file, "w"); fprintf(fp, "%s",output); fclose(fp); return 1; } json_t *array = json_object_get(payload, "payload"); json_t *root, *dataseq, *graph, *arry; json_error_t error; // Load *.json const char* file = composeFileName(clientName, name, "json"); root = json_load_file(file, 0, &error); // Check for Errors if (!root) { syslog(LOG_ERR, "Unable to load json File! error: on line %d: %s\n", error.line, error.text); exit(1); } // Get old Data graph = json_object_get(root, "graph"); dataseq = getDataSequences(graph); if (!check(dataseq)) { printError(name); } // Process Every Datasequence size_t j; for (j = 0; j < json_array_size(dataseq); j++) { // If Type is 0 /Line (standard case) append new value at the bottom arry = getSingleSeqeunce(dataseq, j); if (strncmp(getType(root), "line", 2) == 0) { if (json_array_size(arry) >= (size_t) getMaxCount()) { if (json_array_remove(arry,0)) { syslog(LOG_ERR, "error in processing %s.json\n", name); return 0; } } if (json_array_append_new(arry, json_array_get(array, j))) { syslog(LOG_ERR, "error in appending new entry in %s.json\n", name); return 0; } // When Type is Bar, every Entry has its own name and you change the value } else { size_t k; for (k = 0; k < json_array_size(arry); k++) { if (json_real_set(json_object_get(json_array_get(arry, k), "value"), json_real_value(json_array_get(array, k))) ) { return 0; } syslog(LOG_ERR, "error in changing entry in %s.json\n", name); } } } dumpJSON(root, file); return 1; }