// Serializes a merge-mining tag as a length-prefixed binary blob nested inside
// the outer serializer. On output the tag is first rendered into a string via a
// BinaryOutputStreamSerializer, then that string is written as a single field;
// on input the field string is read first and the tag is decoded out of it.
void serialize(TransactionExtraMergeMiningTag& tag, ISerializer& serializer) {
  const bool writing = (serializer.type() == ISerializer::OUTPUT);
  if (writing) {
    std::string blob;
    StringOutputStream blobStream(blob);
    BinaryOutputStreamSerializer blobSerializer(blobStream);
    doSerialize(tag, blobSerializer);
    serializer(blob, "");
  } else {
    std::string blob;
    serializer(blob, "");
    MemoryInputStream blobStream(blob.data(), blob.size());
    BinaryInputStreamSerializer blobSerializer(blobStream);
    doSerialize(tag, blobSerializer);
  }
}
/* Serializes a relation (list of tuples) to the writer's buffer.
 * Emits a relation header (with the tuple type inline the first time it is
 * seen, or a shared-type id thereafter), the element count, and then each
 * tuple in list order.
 */
static void writeRelation(A2PWriter writer, A2PType expected, ATermList relation){
	A2PRelationType relType = (A2PRelationType) expected->theType;
	A2PType tupleType = relType->tupleType;
	ISIndexedSet sharedTypes = writer->typeSharingMap;
	int tupleHash = hashType(tupleType);
	int tupleTypeId = ISget(sharedTypes, (void*) tupleType, tupleHash);
	int size = ATgetLength(relation);
	ATermList cursor;
	
	if(tupleTypeId != -1){
		/* Tuple type was serialized before; reference it by id. */
		writeByteToBuffer(writer->buffer, PDB_RELATION_HEADER | PDB_TYPE_SHARED_FLAG);
		printInteger(writer->buffer, tupleTypeId);
	}else{
		/* First occurrence: write the type inline and register it for sharing. */
		writeByteToBuffer(writer->buffer, PDB_RELATION_HEADER);
		doWriteType(writer, tupleType);
		ISstore(sharedTypes, (void*) tupleType, tupleHash);
	}
	
	printInteger(writer->buffer, size);
	
	for(cursor = relation; !ATisEmpty(cursor); cursor = ATgetNext(cursor)){
		doSerialize(writer, tupleType, ATgetFirst(cursor));
	}
}
/* Serializes a constructor application.
 * Validates that the term's arity matches the constructor type's child count
 * (fatal mismatch otherwise), emits the constructor header (inline type on
 * first sight, shared id afterwards), the arity, and each argument with its
 * declared field type.
 */
static void writeConstructor(A2PWriter writer, A2PType expected, ATermAppl constructor){
	A2PConstructorType consType = (A2PConstructorType) expected->theType;
	A2PTupleType childrenType = ((A2PTupleType) consType->children->theType);
	int nrOfChildren = typeArraySize(childrenType->fieldTypes);
	ISIndexedSet sharedTypes = writer->typeSharingMap;
	int typeHash = hashType(expected);
	int constructorTypeId = ISget(sharedTypes, (void*) expected, typeHash);
	int arity = ATgetArity(ATgetAFun(constructor));
	int argIndex;
	
	if(arity != nrOfChildren){
		fprintf(stderr, "Arity (%d) is unequal to the number of children (%d); term was:\n%s\n", arity, nrOfChildren, ATwriteToString((ATerm) constructor));
		exit(1);
	}
	
	if(constructorTypeId != -1){
		/* Type already shared; reference by id. */
		writeByteToBuffer(writer->buffer, PDB_CONSTRUCTOR_HEADER | PDB_TYPE_SHARED_FLAG);
		printInteger(writer->buffer, constructorTypeId);
	}else{
		/* First occurrence: serialize the type inline and register it. */
		writeByteToBuffer(writer->buffer, PDB_CONSTRUCTOR_HEADER);
		doWriteType(writer, expected);
		ISstore(sharedTypes, (void*) expected, typeHash);
	}
	
	printInteger(writer->buffer, arity);
	
	for(argIndex = 0; argIndex < arity; argIndex++){
		doSerialize(writer, childrenType->fieldTypes[argIndex], ATgetArgument(constructor, argIndex));
	}
}
/* Serializes a set (represented as an ATermList) to the writer's buffer.
 * Emits a set header (inline element type on first sight, shared id
 * afterwards), the element count, and then each element in list order.
 */
static void writeSet(A2PWriter writer, A2PType expected, ATermList set){
	A2PSetType setType = (A2PSetType) expected->theType;
	A2PType elementType = setType->elementType;
	ISIndexedSet sharedTypes = writer->typeSharingMap;
	int elementHash = hashType(elementType);
	int elementTypeId = ISget(sharedTypes, (void*) elementType, elementHash);
	int size = ATgetLength(set);
	ATermList cursor;
	
	if(elementTypeId != -1){
		/* Element type already shared; reference by id. */
		writeByteToBuffer(writer->buffer, PDB_SET_HEADER | PDB_TYPE_SHARED_FLAG);
		printInteger(writer->buffer, elementTypeId);
	}else{
		/* First occurrence: serialize the type inline and register it. */
		writeByteToBuffer(writer->buffer, PDB_SET_HEADER);
		doWriteType(writer, elementType);
		ISstore(sharedTypes, (void*) elementType, elementHash);
	}
	
	printInteger(writer->buffer, size);
	
	for(cursor = set; !ATisEmpty(cursor); cursor = ATgetNext(cursor)){
		doSerialize(writer, elementType, ATgetFirst(cursor));
	}
}
// Serializes this message into a data chunk.
// Guards against serializing a message that is not in a complete state
// (as reported by isAllowed(MSG_ALLOWED_UNPACK)) before delegating to
// the type-specific doSerialize() implementation.
data_chunk Message::Serialize() const {
  const bool complete = isAllowed(MSG_ALLOWED_UNPACK);
  if (!complete) {
    throw Exception("Serialize() called on incomplete Message!");
  }
  return doSerialize();
}
/* Serializes an untyped node term.
 * The node name participates in name sharing: ISstore returns -1 when the
 * name is newly registered (in which case the name bytes are written inline)
 * and the existing id otherwise (in which case only the id is written).
 * The arity and each child (serialized against the generic value type)
 * follow the header.
 */
static void writeNode(A2PWriter writer, A2PType expected, ATermAppl node){
	AFun fun = ATgetAFun(node);
	int arity = ATgetArity(fun);
	char *name = ATgetName(fun);
	int childIndex;
	
	unsigned int hash = hashString(name);
	int nodeNameId = ISstore(writer->nameSharingMap, (void*) name, hash);
	
	if(nodeNameId != -1){
		/* Name seen before; reference it by shared id. */
		writeByteToBuffer(writer->buffer, PDB_NODE_HEADER | PDB_NAME_SHARED_FLAG);
		printInteger(writer->buffer, nodeNameId);
	}else{
		/* New name: write length-prefixed name bytes inline. */
		int nameLength = dataArraySize(name);
		writeByteToBuffer(writer->buffer, PDB_NODE_HEADER);
		printInteger(writer->buffer, nameLength);
		writeDataToBuffer(writer->buffer, name, nameLength);
	}
	
	printInteger(writer->buffer, arity);
	
	for(childIndex = 0; childIndex < arity; childIndex++){
		doSerialize(writer, A2PvalueType(), ATgetArgument(node, childIndex));
	}
}
// Serializes a V8 value into a SerializedScriptValue, transferring the given
// ports/buffers/bitmaps. A JS exception raised during serialization is
// re-thrown through |exceptionState|; input/data-clone errors become DOM
// exceptions. On success the transferables are detached via transferData().
PassRefPtr<SerializedScriptValue> SerializedScriptValueFactory::create(v8::Isolate* isolate, v8::Local<v8::Value> value, MessagePortArray* messagePorts, ArrayBufferArray* arrayBuffers, ImageBitmapArray* imageBitmaps, WebBlobInfoArray* blobInfo, ExceptionState& exceptionState)
{
    RefPtr<SerializedScriptValue> result = create();
    SerializedScriptValueWriter valueWriter;
    ScriptValueSerializer::Status serializeStatus;
    String errorText;
    {
        // The TryCatch must be scoped so it is destroyed before any exception
        // is re-thrown via the ExceptionState.
        v8::TryCatch tryCatch;
        serializeStatus = doSerialize(value, valueWriter, messagePorts, arrayBuffers, imageBitmaps, blobInfo, result.get(), tryCatch, errorText, isolate);
        if (serializeStatus == ScriptValueSerializer::JSException) {
            // A JS exception was thrown during serialization; propagate it.
            exceptionState.rethrowV8Exception(tryCatch.Exception());
            return result.release();
        }
    }
    switch (serializeStatus) {
    case ScriptValueSerializer::InputError:
    case ScriptValueSerializer::DataCloneError:
        exceptionState.throwDOMException(ScriptValueSerializer::DataCloneError, errorText);
        return result.release();
    case ScriptValueSerializer::Success:
        transferData(result.get(), valueWriter, arrayBuffers, imageBitmaps, exceptionState, isolate);
        return result.release();
    case ScriptValueSerializer::JSException:
        // Handled inside the TryCatch scope above.
        ASSERT_NOT_REACHED();
        break;
    }
    ASSERT_NOT_REACHED();
    return result.release();
}
/* Serializes a map, represented as a flat ATermList of alternating
 * key/value entries (so the list length must be even; odd length is fatal).
 * Emits a map header (inline type on first sight, shared id afterwards),
 * the number of key/value PAIRS (size >> 1), then each key and value.
 *
 * BUG FIX: the original iteration read ATgetFirst(map)/ATgetNext(map)
 * instead of advancing through 'next', so for any map with more than one
 * pair it re-serialized the first pair forever (infinite loop). The loop
 * now walks 'next', consuming two elements per iteration, matching the
 * traversal style of writeRelation/writeSet.
 */
static void writeMap(A2PWriter writer, A2PType expected, ATermList map){
	A2PMapType mapType = (A2PMapType) expected->theType;
	ISIndexedSet sharedTypes = writer->typeSharingMap;
	int mapHash = hashType(expected);
	int mapTypeId = ISget(sharedTypes, (void*) expected, mapHash);
	int size = ATgetLength(map);
	ATermList next;
	
	if(size % 2 == 1){
		fprintf(stderr, "Number of elements in the map is unbalanced.\n");
		exit(1);
	}
	
	if(mapTypeId == -1){
		/* First occurrence: serialize the map type inline and register it. */
		writeByteToBuffer(writer->buffer, PDB_MAP_HEADER);
		doWriteType(writer, expected);
		ISstore(sharedTypes, (void*) expected, mapHash);
	}else{
		/* Type already shared; reference by id. */
		writeByteToBuffer(writer->buffer, PDB_MAP_HEADER | PDB_TYPE_SHARED_FLAG);
		printInteger(writer->buffer, mapTypeId);
	}
	
	/* Number of pairs, not list elements. */
	printInteger(writer->buffer, size >> 1);
	
	next = map;
	while(!ATisEmpty(next)){
		ATerm key = ATgetFirst(next);
		ATerm value;
		next = ATgetNext(next);
		value = ATgetFirst(next);
		next = ATgetNext(next);
		
		doSerialize(writer, mapType->keyType, key);
		doSerialize(writer, mapType->valueType, value);
	}
}
/* Serializes 'term' against 'topType' into a freshly malloc'd byte buffer.
 * On return, *length holds the buffer size and the caller owns the returned
 * memory (free() it when done).
 *
 * FIX: the original did not check the malloc result, so an allocation
 * failure led to memcpy into NULL (undefined behavior). Failure is now
 * reported and aborts, consistent with the fprintf/exit error style used
 * elsewhere in this file.
 */
char *A2Pserialize(ATerm term, A2PType topType, int *length){
	A2PWriter writer = createWriter();
	ByteBuffer buffer = writer->buffer;
	char *result;
	int bufferSize;
	
	doSerialize(writer, topType, term);
	
	bufferSize = getCurrentByteBufferSize(buffer);
	result = (char*) malloc(bufferSize);
	if(result == NULL){
		fprintf(stderr, "Unable to allocate memory for the serialized term.\n");
		exit(1);
	}
	memcpy(result, buffer->buffer, bufferSize);
	*length = bufferSize;
	
	destroyWriter(writer);
	
	return result;
}
/* Serializes a tuple term.
 * Validates that the tuple's arity matches the number of field types declared
 * by its type (fatal mismatch otherwise), then emits the tuple header, the
 * arity, and each field serialized against its declared field type.
 * Note: tuple types are not shared, so the header carries no type info.
 */
static void writeTuple(A2PWriter writer, A2PType expected, ATermAppl tuple){
	A2PTupleType tupleType = (A2PTupleType) expected->theType;
	A2PType *fieldTypes = tupleType->fieldTypes;
	int numberOfFieldTypes = typeArraySize(fieldTypes);
	int arity = ATgetArity(ATgetAFun(tuple));
	int fieldIndex;
	
	if(arity != numberOfFieldTypes){
		fprintf(stderr, "The number of children specified in the type is not equal to the arity of this tuple.\n");
		exit(1);
	}
	
	writeByteToBuffer(writer->buffer, PDB_TUPLE_HEADER);
	printInteger(writer->buffer, arity);
	
	for(fieldIndex = 0; fieldIndex < arity; fieldIndex++){
		doSerialize(writer, fieldTypes[fieldIndex], ATgetArgument(tuple, fieldIndex));
	}
}
// Convenience overload: forwards to the main doSerialize() overload, passing
// the blob data handles extracted from |serializedValue| in place of the
// SerializedScriptValue itself. All other arguments are forwarded unchanged.
ScriptValueSerializer::Status SerializedScriptValueFactory::doSerialize(v8::Local<v8::Value> value, SerializedScriptValueWriter& writer, MessagePortArray* messagePorts, ArrayBufferArray* arrayBuffers, ImageBitmapArray* imageBitmaps, WebBlobInfoArray* blobInfo, SerializedScriptValue* serializedValue, v8::TryCatch& tryCatch, String& errorMessage, v8::Isolate* isolate) { return doSerialize(value, writer, messagePorts, arrayBuffers, imageBitmaps, blobInfo, serializedValue->blobDataHandles(), tryCatch, errorMessage, isolate); }
// Encodes a tree to a single string. string serialize(TreeNode *root) { string res; doSerialize(root, res); return res; }