Example #1
/*!
	\fn string Event::serialize()
	\brief Serializes the event in JSON format
	\author Phil Harmston
*/
string Event::serialize()
{
	stringstream output;

	//Return the cached result if the event has already been serialized
	if(buffer.size() > 0)
		return buffer;

	output << "\"" << id << "\":{";

	//destroyed
	if( destroyed != NULL)
		output << "\"destroyed\": " << serialize_map(destroyed);

	//damaged
	if( damaged != NULL)
		output << "\"damaged\": " << serialize_map(damaged);

	//affected
	if( affected != NULL)
		output << "\"affected\": " << serialize_map(affected);

	//actor
	if( instigator != NULL)
		output << "\"instigator\": \"" << instigator->get_userid() << "\",";

	//origin
	output << "\"origin\": \"" << origin.x << " " << origin.y << "\",";

	//alerts
	output << "\"alerts\":\"" << outcome << "\"}";

	buffer = output.str();

	return buffer;
}
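
For context, a minimal sketch of a serialize_map helper compatible with the call sites above, assuming destroyed/damaged/affected are pointers to std::map<std::string, int> and that each serialized section carries its own trailing comma; neither the real map types nor the exact output format are shown in this example.

#include <map>
#include <sstream>
#include <string>

//Hypothetical helper: emits a map as a JSON object, assuming string keys and
//integer values, with a trailing comma so the caller can append further fields.
static std::string serialize_map(const std::map<std::string, int>* m)
{
	std::stringstream out;
	out << "{";
	for (std::map<std::string, int>::const_iterator it = m->begin(); it != m->end(); ++it) {
		if (it != m->begin())
			out << ",";
		out << "\"" << it->first << "\": " << it->second;
	}
	out << "},";
	return out.str();
}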
Example #2
void types_visitor::serialize_complex_item(complex_item* item)
{
    switch(item->type)
    {
    case COMPLEX_SIMPLE: serialize_simple_item(item->data._simple); break;
    case COMPLEX_TUPLE:  serialize_tuple(item->data._tuple); break;
    case COMPLEX_BAG:    serialize_bag(item->data._bag); break;
    case COMPLEX_MAP:    serialize_map(item->data._map); break;
    }
}
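
The switch above implies that complex_item is a tagged union. A hypothetical declaration consistent with that usage, assuming pointer members inside the union (the real definitions are not part of this example):

enum complex_type { COMPLEX_SIMPLE, COMPLEX_TUPLE, COMPLEX_BAG, COMPLEX_MAP };

struct simple_item;
struct tuple_item;
struct bag_item;
struct map_item;

struct complex_item
{
    complex_type type;          //discriminant checked by serialize_complex_item
    union
    {
        simple_item* _simple;
        tuple_item*  _tuple;
        bag_item*    _bag;
        map_item*    _map;
    } data;
};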
Example #3
void createProbingPT(const char * phrasetable_path, const char * target_path){
    //Get basepath and create directory if missing
    std::string basepath(target_path);
    mkdir(basepath.c_str(), S_IRWXU | S_IRWXG | S_IROTH | S_IXOTH);

    //Set up huffman and serialize decoder maps.
    Huffman huffmanEncoder(phrasetable_path); //initialize
    huffmanEncoder.assign_values();
    huffmanEncoder.produce_lookups();
    huffmanEncoder.serialize_maps(target_path);

    //Get the number of unique lines:
    unsigned long uniq_entries = huffmanEncoder.getUniqLines();

    //Source phrase vocabids
    std::map<uint64_t, std::string> source_vocabids;

    //Read the file
    util::FilePiece filein(phrasetable_path);

    //Init the probing hash table
    size_t size = Table::Size(uniq_entries, 1.2);
    char * mem = new char[size];
    memset(mem, 0, size);
    Table table(mem, size);

    BinaryFileWriter binfile(basepath); //Init the binary file writer.

    line_text prev_line; //Used to check whether the source phrase of the previous line is the same

    //Keep track of the size of each group of target phrases
    uint64_t entrystartidx = 0;
    //uint64_t line_num = 0;


    //Read everything and process it
    while(true){
        try {
            //Process line read
            line_text line;
            line = splitLine(filein.ReadLine());
            //Add source phrases to vocabularyIDs
            add_to_map(&source_vocabids, line.source_phrase);

            if ((binfile.dist_from_start + binfile.extra_counter) == 0) {
                prev_line = line; //For the first iteration, assume the previous line is the same as this one.
            }

            if (line.source_phrase != prev_line.source_phrase){

                //The source phrase changed, so close off the previous group.
                //Create an entry for the previous source phrase:
                Entry pesho;
                pesho.value = entrystartidx;
                //The key is the sum of hashes of individual words. Probably not entirely correct, but fast
                pesho.key = 0;
                std::vector<uint64_t> vocabid_source = getVocabIDs(prev_line.source_phrase);
                for (size_t i = 0; i < vocabid_source.size(); i++){
                    pesho.key += vocabid_source[i];
                }
                pesho.bytes_toread = binfile.dist_from_start + binfile.extra_counter - entrystartidx;

                //Put into table
                table.Insert(pesho);

                entrystartidx = binfile.dist_from_start + binfile.extra_counter; //Designate start idx for new entry

                //Encode a line and write it to disk.
                std::vector<unsigned char> encoded_line = huffmanEncoder.full_encode_line(line);
                binfile.write(&encoded_line);

                //Set prevLine
                prev_line = line;

            } else{
                //If we still have the same line, just append to it:
                std::vector<unsigned char> encoded_line = huffmanEncoder.full_encode_line(line);
                binfile.write(&encoded_line);
            }

        } catch (util::EndOfFileException &e){
            std::cerr << "Reading phrase table finished, writing remaining files to disk." << std::endl;
            binfile.flush();

            //After the final entry is constructed we need to add it to the phrase_table
            //Create an entry for the previous source phrase:
            Entry pesho;
            pesho.value = entrystartidx;
            //The key is the sum of hashes of individual words. Probably not entirely correct, but fast
            pesho.key = 0;
            std::vector<uint64_t> vocabid_source = getVocabIDs(prev_line.source_phrase);
            for (size_t i = 0; i < vocabid_source.size(); i++){
                pesho.key += vocabid_source[i];
            }
            pesho.bytes_toread = binfile.dist_from_start + binfile.extra_counter - entrystartidx;
            //Put into table
            table.Insert(pesho);

            break;
        }
    }

    serialize_table(mem, size, (basepath + "/probing_hash.dat").c_str());

    serialize_map(&source_vocabids, (basepath + "/source_vocabids").c_str());
    
    delete[] mem;

    //Write configfile
    std::ofstream configfile;
    configfile.open((basepath + "/config").c_str());
    configfile << uniq_entries << '\n';
    configfile.close();
}
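
For context, a minimal sketch of a serialize_map overload that would match the source_vocabids call above, assuming a plain one-entry-per-line text format; the actual on-disk format of the probing phrase table is not shown here.

#include <cstdint>
#include <fstream>
#include <map>
#include <string>

//Hypothetical writer: dumps the vocabulary id -> word map as "id<TAB>word" lines.
void serialize_map(const std::map<uint64_t, std::string>* vocabids, const char* filename)
{
    std::ofstream out(filename);
    for (std::map<uint64_t, std::string>::const_iterator it = vocabids->begin();
         it != vocabids->end(); ++it) {
        out << it->first << '\t' << it->second << '\n';
    }
    out.close();
}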
Example #4
 inline bool operator()( NodeType & dest , const MapType & src ) const
 {
     return serialize_map( dest, src );
 }
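
A self-contained usage sketch with hypothetical stand-ins for NodeType and MapType (the real template parameters and the serialize_map overload they select are not shown in this example):

#include <map>
#include <sstream>
#include <string>

typedef std::ostringstream NodeType;          //assumed destination node type
typedef std::map<std::string, int> MapType;   //assumed source map type

//Hypothetical free function the functor forwards to.
bool serialize_map(NodeType& dest, const MapType& src)
{
    for (MapType::const_iterator it = src.begin(); it != src.end(); ++it)
        dest << it->first << '=' << it->second << '\n';
    return true;
}

struct map_writer
{
    inline bool operator()(NodeType& dest, const MapType& src) const
    {
        return serialize_map(dest, src);      //same forwarding pattern as above
    }
};

int main()
{
    NodeType out;
    MapType counts;
    counts["hits"] = 3;
    return map_writer()(out, counts) ? 0 : 1; //the functor is usable like any callable
}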