Example #1
0
void Callback(JSONNode & test, void * ide){
	assertEquals(ide, (void*)0xDEADBEEF);
    ++counter;
    switch(counter){
	   case 1:
		  assertEquals(test.type(), JSON_NODE);
		  assertTrue(test.empty());
		  break;
	   case 2:
		  assertEquals(test.type(), JSON_ARRAY);
		  assertTrue(test.empty());
		  break;
	   case 3:
		  assertEquals(test.type(), JSON_NODE);
		  assertEquals(test.size(), 1);
		  assertEquals(test[0].name(), JSON_TEXT("hello"));
		  assertEquals(test[0].as_int(), 1);
		  break;
	   case 4:
		  assertEquals(test.type(), JSON_ARRAY);
		  assertEquals(test.size(), 3);
		  break;
	   case 5:
		  assertEquals(test.type(), JSON_NODE);
		  assertEquals(test.size(), 1);
		  assertEquals(test[0].name(), JSON_TEXT("hi"));
		  assertEquals(test[0].size(), 1);
		  assertEquals(test[0][0].type(), JSON_NUMBER);
		  assertEquals(test[0][0].name(), JSON_TEXT("one"));
		  assertEquals(test[0][0].as_int(), 1);
		  break;
    }
}
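For context, a minimal driver that would exercise the five cases above is sketched below: it parses one fragment per case with libjson::parse and invokes Callback directly. The fragments, the counter reset, and the DriveCallback name are illustrative assumptions; the original streaming tests feed such fragments through libjson's stream interface instead.
// Illustrative driver (not from the original test suite): parse five fragments
// chosen to satisfy cases 1-5 of Callback and invoke it with the expected
// identifier. Assumes `counter` is the test-suite global incremented above.
void DriveCallback(void){
    counter = 0;
    const json_char * fragments[] = {
        JSON_TEXT("{}"),                   // case 1: empty node
        JSON_TEXT("[]"),                   // case 2: empty array
        JSON_TEXT("{\"hello\":1}"),        // case 3: one named int
        JSON_TEXT("[1,2,3]"),              // case 4: array of three elements
        JSON_TEXT("{\"hi\":{\"one\":1}}")  // case 5: nested node
    };
    for (unsigned int i = 0; i < 5; ++i){
        JSONNode node = libjson::parse(json_string(fragments[i]));
        Callback(node, (void*)0xDEADBEEF);
    }
}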
Example #2
0
bool JSONReader::parseAll(const std::string& text, IdType& nextId, std::string& filepath, std::string& checksum,
    std::vector<std::shared_ptr<MetadataStream::VideoSegment>>& segments,
    std::vector<std::shared_ptr<MetadataSchema>>& schemas,
    std::vector<std::shared_ptr<MetadataInternal>>& metadata)
{
    if(text.empty())
    {
        VMF_LOG_ERROR("Empty input JSON string");
        return false;
    }

    schemas.clear();
    metadata.clear();

    JSONNode root;
    try
    {
        root = libjson::parse(text);
    }
    catch(...)
    {
        VMF_LOG_ERROR("Can't get JSON root");
        return false;
    }

    if(root.size() != 1)
    {
        VMF_LOG_ERROR("More than one JSON root");
        return false;
    }

    JSONNode localRootNode = root[0];

    if( localRootNode.name() == TAG_VMF )
    {
        auto nextIdIter = localRootNode.find(ATTR_VMF_NEXTID);
        if(nextIdIter != localRootNode.end() )
            nextId = nextIdIter->as_int();
        auto filepathIter = localRootNode.find(ATTR_VMF_FILEPATH);
        if(filepathIter != localRootNode.end() )
            filepath = filepathIter->as_string();
        auto checksumIter = localRootNode.find(ATTR_VMF_CHECKSUM);
        if(checksumIter != localRootNode.end() )
            checksum = checksumIter->as_string();

        if(!parseVideoSegments(text, segments))
            return false;
        if(!parseSchemas(text, schemas))
            return false;
        if(!parseMetadata(text, schemas, metadata))
            return false;
    }
    else
    {
        VMF_LOG_ERROR("Root JSON element isn't 'vmf'");
        return false;
    }

    return true;
}
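A hypothetical call site for parseAll, assuming a default-constructed JSONReader and a std::string jsonText holding the serialized document; every other name comes from the signature above.
// Illustrative only: jsonText and the default construction of JSONReader are assumptions.
JSONReader reader;
IdType nextId = 0;
std::string filepath, checksum;
std::vector<std::shared_ptr<MetadataStream::VideoSegment>> segments;
std::vector<std::shared_ptr<MetadataSchema>> schemas;
std::vector<std::shared_ptr<MetadataInternal>> metadata;

if(!reader.parseAll(jsonText, nextId, filepath, checksum, segments, schemas, metadata))
{
    VMF_LOG_ERROR("Failed to parse serialized metadata");
}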
Example #3
0
void CDropbox::CommandContent(void *arg)
{
	CommandParam *param = (CommandParam*)arg;

	const char *path = (const char*)param->data;
	if (path == NULL)
		path = "";

	ptrA token(db_get_sa(NULL, MODULE, "TokenSecret"));
	ptrA encodedPath(mir_utf8encode(path));
	GetMetadataRequest request(token, encodedPath);
	NLHR_PTR response(request.Send(param->instance->hNetlibConnection));

	if (response == NULL || response->resultCode != HTTP_STATUS_OK) {
		ProtoBroadcastAck(MODULE, param->hContact, ACKTYPE_MESSAGE, ACKRESULT_FAILED, param->hProcess, 0);
		return;
	}

	JSONNode root = JSONNode::parse(response->pData);
	if (root.empty()) {
		ProtoBroadcastAck(MODULE, param->hContact, ACKTYPE_MESSAGE, ACKRESULT_FAILED, param->hProcess, 0);
		return;
	}

	CMStringA message;
	bool isDir = root.at("is_dir").as_bool();
	if (!isDir)
		message.AppendFormat("\"%s\" %s", encodedPath, T2Utf(TranslateT("is file")));
	else {
		JSONNode content = root.at("contents").as_array();
		for (size_t i = 0; i < content.size(); i++) {
			JSONNode item = content[i];
			if (item.empty()) {
				if (i == 0)
					message.AppendFormat("\"%s\" %s", encodedPath, T2Utf(TranslateT("is empty")));
				break;
			}

			CMStringA subName(item.at("path").as_string().c_str());
			message.AppendFormat("%s\n", (subName[0] == '/') ? subName.Mid(1) : subName);
		}
	}

	ProtoBroadcastAck(MODULE, param->hContact, ACKTYPE_MESSAGE, ACKRESULT_SUCCESS, param->hProcess, 0);
	CallContactService(param->instance->GetDefaultContact(), PSR_MESSAGE, 0, (LPARAM)message.GetBuffer());
}
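For reference, the code above relies on only three fields of the Dropbox metadata response: is_dir, contents, and each entry's path. A trimmed, purely illustrative payload of that shape:
{
    "is_dir": true,
    "contents": [
        { "path": "/Photos" },
        { "path": "/notes.txt" }
    ]
}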
Example #4
0
void TestSuite::TestInspectors(void){
    UnitTest::SetPrefix("TestInspectors.cpp - Inspectors");
    JSONNode test = JSONNode(JSON_NULL);
    #ifdef JSON_CASTABLE
	   assertEquals(test.as_string(), JSON_TEXT(""));
	   assertEquals(test.as_int(), 0);
	   assertEquals(test.as_float(), 0.0f);
	   assertEquals(test.as_bool(), false);
    #endif

    test = 15.5f;
    assertEquals(test.type(), JSON_NUMBER);
  #ifdef JSON_CASTABLE
    assertEquals(test.as_string(), JSON_TEXT("15.5"));
  #endif
    assertEquals(test.as_int(), 15);
    assertEquals(test.as_float(), 15.5f);
    #ifdef JSON_CASTABLE
	   assertEquals(test.as_bool(), true);
    #endif

    test = 0.0f;
    assertEquals(test.type(), JSON_NUMBER);
  #ifdef JSON_CASTABLE
    assertEquals(test.as_string(), JSON_TEXT("0"));
  #endif
    assertEquals(test.as_int(), 0);
    assertEquals(test.as_float(), 0.0f);
    #ifdef JSON_CASTABLE
	   assertEquals(test.as_bool(), false);
    #endif

    test = true;
    assertEquals(test.type(), JSON_BOOL);
    #ifdef JSON_CASTABLE
	   assertEquals(test.as_string(), JSON_TEXT("true"));
	   assertEquals(test.as_int(), 1);
	   assertEquals(test.as_float(), 1.0f);
    #endif
    assertEquals(test.as_bool(), true);

    test = false;
    assertEquals(test.type(), JSON_BOOL);
    #ifdef JSON_CASTABLE
	   assertEquals(test.as_string(), JSON_TEXT("false"));
	   assertEquals(test.as_int(), 0);
	   assertEquals(test.as_float(), 0.0f);
    #endif
    assertEquals(test.as_bool(), false);

    #ifdef JSON_CASTABLE
	   test.cast(JSON_NODE);
    #else
	   test = JSONNode(JSON_NODE);
    #endif
    assertEquals(test.type(), JSON_NODE);
    assertEquals(test.size(), 0);
    test.push_back(JSONNode(JSON_TEXT("hi"), JSON_TEXT("world")));
    test.push_back(JSONNode(JSON_TEXT("hello"), JSON_TEXT("mars")));
    test.push_back(JSONNode(JSON_TEXT("salut"), JSON_TEXT("france")));
    assertEquals(test.size(), 3);
    TestSuite::testParsingItself(test);

    #ifdef JSON_CASTABLE
	   JSONNode casted = test.as_array();
	   #ifdef JSON_UNIT_TEST
		  assertNotEquals(casted.internal, test.internal);
	   #endif
	   assertEquals(casted.type(), JSON_ARRAY);
	   assertEquals(test.type(), JSON_NODE);
	   assertEquals(test.size(), 3);
	   assertEquals(casted.size(), 3);
	   TestSuite::testParsingItself(casted);
    #endif

    UnitTest::SetPrefix("TestInspectors.cpp - Location");

    try {
	   #ifdef JSON_CASTABLE
		  assertEquals(casted.at(0), JSON_TEXT("world"));
		  assertEquals(casted.at(1), JSON_TEXT("mars"));
		  assertEquals(casted.at(2), JSON_TEXT("france"));
		  assertEquals(casted.at(0).name(), JSON_TEXT(""));
		  assertEquals(casted.at(1).name(), JSON_TEXT(""));
		  assertEquals(casted.at(2).name(), JSON_TEXT(""));
	   #endif
	   assertEquals(test.at(0), JSON_TEXT("world"));
	   assertEquals(test.at(1), JSON_TEXT("mars"));
	   assertEquals(test.at(2), JSON_TEXT("france"));
	   assertEquals(test.at(0).name(), JSON_TEXT("hi"));
	   assertEquals(test.at(1).name(), JSON_TEXT("hello"));
	   assertEquals(test.at(2).name(), JSON_TEXT("salut"));
    } catch (const std::out_of_range &){
	   FAIL("exception caught");
    }

    try {
	   assertEquals(test.at(JSON_TEXT("hi")), JSON_TEXT("world"));
	   assertEquals(test.at(JSON_TEXT("hello")), JSON_TEXT("mars"));
	   assertEquals(test.at(JSON_TEXT("salut")), JSON_TEXT("france"));
	   #ifdef JSON_CASE_INSENSITIVE_FUNCTIONS
		  assertEquals(test.at_nocase(JSON_TEXT("SALUT")), JSON_TEXT("france"));
		  assertEquals(test.at_nocase(JSON_TEXT("HELLO")), JSON_TEXT("mars"));
		  assertEquals(test.at_nocase(JSON_TEXT("HI")), JSON_TEXT("world"));
	   #endif
    } catch (const std::out_of_range &){
	   FAIL("exception caught");
    }

    assertException(test.at(JSON_TEXT("meh")), std::out_of_range);
    #ifdef JSON_CASE_INSENSITIVE_FUNCTIONS
	   assertException(test.at_nocase(JSON_TEXT("meh")), std::out_of_range);
    #endif

    assertEquals(test[JSON_TEXT("hi")], json_string(JSON_TEXT("world")));
    assertEquals(test[JSON_TEXT("hello")], json_string(JSON_TEXT("mars")));
    assertEquals(test[JSON_TEXT("salut")], json_string(JSON_TEXT("france")));
    assertEquals(test[0], JSON_TEXT("world"));
    assertEquals(test[1], JSON_TEXT("mars"));
    assertEquals(test[2], JSON_TEXT("france"));

    #ifdef JSON_ITERATORS
	  #ifdef JSON_CASTABLE
	   UnitTest::SetPrefix("TestInspectors.cpp - Iterators");
	   for(JSONNode::iterator it = casted.begin(), end = casted.end(); it != end; ++it){
		  assertEquals((*it).name(), JSON_TEXT(""));
	   }
	  #endif
    #endif

    #ifdef JSON_BINARY
	   UnitTest::SetPrefix("TestInspectors.cpp - Binary");
	   test.set_binary((const unsigned char *)"Hello World", 11);
	   assertEquals(test.type(), JSON_STRING);
	   assertEquals(test.as_string(), JSON_TEXT("SGVsbG8gV29ybGQ="));
	   assertEquals(test.as_binary(), "Hello World");
	   assertEquals(test.as_binary().size(), 11);

	   test = JSON_TEXT("Hello World");
	   assertEquals(test.type(), JSON_STRING);
	   assertEquals(test.as_string(), JSON_TEXT("Hello World"));
	   #ifdef JSON_SAFE
		  assertEquals(test.as_binary(), "");
	   #endif
    #endif

   #ifdef JSON_READ_PRIORITY
	  //This is a regression test for a bug in at()
	  json_string buffer(JSON_TEXT("{ \"myValue1\" : \"foo\", \"myValue2\" : \"bar\"}"));
	  JSONNode current = libjson::parse(buffer);
	  try {
		  JSONNode & value1 = current[JSON_TEXT("myValue1")];
		  assertEquals(value1.as_string(), JSON_TEXT("foo"));
		  JSONNode & value2 = current[JSON_TEXT("myValue2")];
		  assertEquals(value2.as_string(), JSON_TEXT("bar"));
	  } catch (...){
		  assertTrue(false);
	  }
  #endif
}
Example #5
0
TEST_F(SerializerUnittest, Write)
{
    Serializer serializer;
    serializer.Init(".", "Data", SerializerOpenModeWrite);

    // Create 4D field with JILK storage plus stride 2
    const int isize = 6, jsize = 8, ksize = 7, lsize = 4;
    const int kstride = 2, lstride = kstride*ksize, istride = lstride*lsize, jstride = istride*isize;
    const int ibstrided = istride*sizeof(double), jbstrided = jstride*sizeof(double),
              kbstrided = kstride*sizeof(double), lbstrided = lstride*sizeof(double);
    const int ibstridef = istride*sizeof(float), jbstridef = jstride*sizeof(float),
              kbstridef = kstride*sizeof(float), lbstridef = lstride*sizeof(float);
    const int allocsize = jstride*jsize;
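    // With these sizes: kstride=2, lstride=2*7=14, istride=14*4=56, jstride=56*6=336,
    // so allocsize=336*8=2688 elements per buffer.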
    std::vector<double> dataD(allocsize);
    std::vector<float> dataF(allocsize);

    for (int i = 0; i < isize; ++i)
        for (int j = 0; j < jsize; ++j)
            for (int k = 0; k < ksize; ++k)
                for (int l = 0; l < lsize; ++l)
                {
                    dataD[i*istride + j*jstride + k*kstride + l*lstride]
                        = i*12.25 + j*k*1. - 2.75 / (l+10.);
                    dataF[i*istride + j*jstride + k*kstride + l*lstride]
                        = i*12.25 + j*k*1. - 2.75 / (l+10.);
                }

    // Register various versions of this field
    serializer.RegisterField("ik"  , "double", 8, isize,     1, ksize,     1, 0, 0, 0, 0, 0, 0, 0, 0);
    serializer.RegisterField("jk"  , "float" , 4,     1, jsize, ksize,     1, 0, 0, 0, 0, 0, 0, 0, 0);
    serializer.RegisterField("ikl" , "double", 8, isize,     1, ksize, lsize, 0, 0, 0, 0, 0, 0, 0, 0);
    serializer.RegisterField("ijkl", "float" , 4, isize, jsize, ksize, lsize, 0, 0, 0, 0, 0, 0, 0, 0);

    // Setting a savepoint and serializing all fields
    Savepoint sp;
    sp.Init("MySavepoint");
    sp.AddMetainfo("Value", 3.125);
    sp.AddMetainfo("ValueF", 3.625f);
    sp.AddMetainfo("MyName", "Andrea Arteaga");
    sp.AddMetainfo("MyAge", 26);
    sp.AddMetainfo("ILikeThis", true);

    const int ilevel = 3, jlevel = 0, llevel = 2;

    const double*   pIKData = dataD.data() + jstride*jlevel + lstride*llevel;
    const float*    pJKData = dataF.data() + istride*ilevel + lstride*llevel;
    const double*  pIKLData = dataD.data() + jstride*jlevel;
    const float*  pIJKLData = dataF.data();

    serializer.WriteField(  "ik", sp,   pIKData, ibstrided, jbstrided, kbstrided, lbstrided);
    serializer.WriteField(  "jk", sp,   pJKData, ibstridef, jbstridef, kbstridef, lbstridef);
    serializer.WriteField( "ikl", sp,  pIKLData, ibstrided, jbstrided, kbstrided, lbstrided);
    serializer.WriteField("ijkl", sp, pIJKLData, ibstridef, jbstridef, kbstridef, lbstridef);

    // Check metainfo
    std::ifstream dbfs("Data.json");
    std::string dbtxt((std::istreambuf_iterator<char>(dbfs)),
                       std::istreambuf_iterator<char>());
    dbfs.close();
    JSONNode dbnode = libjson::parse(dbtxt);
    JSONNode fieldstable, offsettable, globalinfo;
    ASSERT_EQ(3, dbnode.size());
    ASSERT_NO_THROW(fieldstable = dbnode.at("FieldsTable"));
    ASSERT_NO_THROW(offsettable = dbnode.at("OffsetTable"));
    ASSERT_NO_THROW(globalinfo  = dbnode.at("GlobalMetainfo"));
    ASSERT_EQ(4, fieldstable.size());
    ASSERT_EQ(1, offsettable.size());
    ASSERT_LE(1, globalinfo.size());
    ASSERT_EQ(std::string("centralized"), globalinfo.at("__format").as_string());

    // Check savepoint metainfo
    Savepoint spload;
    ASSERT_EQ(std::string("MySavepoint"), offsettable[0]["__name"].as_string());
    ASSERT_EQ(0, offsettable[0]["__id"].as_int());
    ASSERT_EQ(3.125, offsettable[0]["Value"].as_float());
    ASSERT_EQ(3.625f, offsettable[0]["ValueF"].as_float());
    ASSERT_EQ(std::string("Andrea Arteaga"), offsettable[0]["MyName"].as_string());
    ASSERT_EQ(26, offsettable[0]["MyAge"].as_int());
    ASSERT_TRUE(offsettable[0]["ILikeThis"].as_bool());

    {   // Check IK

        // Read data
        std::ifstream fs("Data_ik.dat", std::ios::binary);
        fs.seekg(0, fs.end);
        int filelength = fs.tellg();
        fs.seekg(0, fs.beg);
        ASSERT_EQ(sizeof(double)*isize*ksize, filelength);

        std::vector<char> rawdata(filelength);
        fs.read(rawdata.data(), filelength);
        fs.close();

        // Check data
        const double* pNewData = reinterpret_cast<const double*>(rawdata.data());
        for (int i = 0; i < isize; ++i)
            for (int k = 0; k < ksize; ++k)
            {
                const int j = jlevel, l = llevel;
                const double value = *(pNewData + k*isize + i);
                const double reference = dataD[i*istride + j*jstride + k*kstride + l*lstride];
                ASSERT_EQ(reference, value) << "i=" << i << ", k=" << k;
            }
    }

    {   // Check JK

        // Read data
        std::ifstream fs("Data_jk.dat", std::ios::binary);
        fs.seekg(0, fs.end);
        int filelength = fs.tellg();
        fs.seekg(0, fs.beg);
        ASSERT_EQ(sizeof(float)*jsize*ksize, filelength);

        std::vector<char> rawdata(filelength);
        fs.read(rawdata.data(), filelength);
        fs.close();

        // Check data
        const float* pNewData = reinterpret_cast<const float*>(rawdata.data());
        for (int j = 0; j < jsize; ++j)
            for (int k = 0; k < ksize; ++k)
            {
                const int i = ilevel, l = llevel;
                const float value = *(pNewData + k*jsize + j);
                const float reference = dataF[i*istride + j*jstride + k*kstride + l*lstride];
                ASSERT_EQ(reference, value) << "j=" << j << ", k=" << k;
            }
    }

    {   // Check IKL

        // Read data
        std::ifstream fs("Data_ikl.dat", std::ios::binary);
        fs.seekg(0, fs.end);
        int filelength = fs.tellg();
        fs.seekg(0, fs.beg);
        ASSERT_EQ(sizeof(double)*isize*ksize*lsize, filelength);

        std::vector<char> rawdata(filelength);
        fs.read(rawdata.data(), filelength);
        fs.close();

        // Check data
        const double* pNewData = reinterpret_cast<const double*>(rawdata.data());
        for (int i = 0; i < isize; ++i)
            for (int k = 0; k < ksize; ++k)
                for (int l = 0; l < lsize; ++l)
                {
                    const int j = jlevel;
                    const double value = *(pNewData + l*ksize*isize + k*isize + i);
                    const double reference = dataD[i*istride + j*jstride + k*kstride + l*lstride];
                    ASSERT_EQ(reference, value) << "i=" << i << ", k=" << k << ", l=" << l;
                }
    }

    {   // Check IJKL

        // Read data
        std::ifstream fs("Data_ijkl.dat", std::ios::binary);
        fs.seekg(0, fs.end);
        int filelength = fs.tellg();
        fs.seekg(0, fs.beg);
        ASSERT_EQ(sizeof(float)*isize*jsize*ksize*lsize, filelength);

        std::vector<char> rawdata(filelength);
        fs.read(rawdata.data(), filelength);
        fs.close();

        // Check data
        const float* pNewData = reinterpret_cast<const float*>(rawdata.data());
        for (int i = 0; i < isize; ++i)
            for (int j = 0; j < jsize; ++j)
                for (int k = 0; k < ksize; ++k)
                    for (int l = 0; l < lsize; ++l)
                    {
                        const float value = *(pNewData + l*ksize*jsize*isize + k*jsize*isize + j*isize + i);
                        const float reference = dataF[i*istride + j*jstride + k*kstride + l*lstride];
                        ASSERT_EQ(reference, value) << "i=" << i << ", j=" << j << ", k=" << k << ", l=" << l;
                    }
    }

    // Second savepoint: some fields are rewritten, others are kept the same
    dataD[0] += 1.;
    dataF[0] += 1.;
    // ik does not change
    // jk does not change
    // ikl changes
    // ijkl changes

    sp.Init("SecondSavepoint");

    serializer.WriteField(  "ik", sp,   pIKData, ibstrided, jbstrided, kbstrided, lbstrided);
    serializer.WriteField(  "jk", sp,   pJKData, ibstridef, jbstridef, kbstridef, lbstridef);
    serializer.WriteField( "ikl", sp,  pIKLData, ibstrided, jbstrided, kbstrided, lbstrided);
    serializer.WriteField("ijkl", sp, pIJKLData, ibstridef, jbstridef, kbstridef, lbstridef);

    // Load offsettable again
    dbfs.open("Data.json");
    dbtxt = std::string((std::istreambuf_iterator<char>(dbfs)),
                         std::istreambuf_iterator<char>());
    dbfs.close();
    dbnode = libjson::parse(dbtxt);
    ASSERT_NO_THROW(offsettable = dbnode["OffsetTable"]);
    ASSERT_EQ(2, offsettable.size());

    ASSERT_EQ(std::string("SecondSavepoint"), offsettable[1]["__name"].as_string());
    ASSERT_EQ(1, offsettable[1]["__id"].as_int());
    ASSERT_EQ(4, offsettable[1]["__offsets"].size());
    for (int i = 0; i < 4; ++i)
    {
        const std::string fieldname = offsettable[1]["__offsets"][i].name();
        if (fieldname.size() < 3)
            ASSERT_EQ(0, offsettable[1]["__offsets"][i][0].as_int());
        else
            ASSERT_LT(0, offsettable[1]["__offsets"][i][0].as_int());
    }

    // Check binary data size
    for (int i = 0; i < 4; ++i)
    {
        const std::string fieldname = offsettable[1]["__offsets"][i].name();
        int filelength_expect;
        if (fieldname ==   "ik") filelength_expect = 1*sizeof(double)*isize*  1  *ksize*  1  ;
        if (fieldname ==   "jk") filelength_expect = 1*sizeof(float )*  1  *jsize*ksize*  1  ;
        if (fieldname ==  "ikl") filelength_expect = 2*sizeof(double)*isize*  1  *ksize*lsize;
        if (fieldname == "ijkl") filelength_expect = 2*sizeof(float )*isize*jsize*ksize*lsize;

        std::ifstream fs(("Data_" + fieldname + ".dat").c_str(), std::ios::binary);
        fs.seekg(0, fs.end);
        int filelength = fs.tellg();
        fs.close();
        ASSERT_EQ(filelength_expect, filelength);
    }

    // Cleanup files
    std::remove("Data.json");
    std::remove("Data_ik.dat");
    std::remove("Data_jk.dat");
    std::remove("Data_ikl.dat");
    std::remove("Data_ijkl.dat");
}
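From the metainfo checks above, the Data.json written by the serializer carries three top-level entries; a schematic, illustrative view (entries abbreviated, key order not guaranteed):
{
    "GlobalMetainfo": { "__format": "centralized", ... },
    "FieldsTable":    [ ...four DataFieldInfo entries... ],
    "OffsetTable":    [ ...one record per savepoint... ]
}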
Example #6
0
TEST_F(OffsetTableUnittest, TableToJSON)
{
    Savepoint sp0, sp1;
    sp0.Init("FastWavesUnittest.Divergence-in");
    sp0.AddMetainfo("LargeTimeStep", 1);
    sp0.AddMetainfo("RKStageNumber", 2);
    sp0.AddMetainfo("ldyn_bbc", false);
    sp1.Init("DycoreUnittest.DoStep-out");
    sp1.AddMetainfo("LargeTimeStep", 2);
    sp1.AddMetainfo("hd", (Real).5);

    // Just for the sake of getting some valid checksums
    double somedata[] = { 1.1, 2.2, 3.3, 4.4 };

    // Fill table
    OffsetTable table;
    ASSERT_NO_THROW(table.AddNewSavepoint(sp0, 0));
    ASSERT_NO_THROW(table.AddNewSavepoint(sp1, 1));
    ASSERT_NO_THROW(table.AddFieldRecord(sp0, "Field1",   0, computeChecksum(somedata, 4)));
    ASSERT_NO_THROW(table.AddFieldRecord(  0, "Field2",   0, computeChecksum(somedata, 8)));
    ASSERT_NO_THROW(table.AddFieldRecord(  1, "Field1", 100, computeChecksum(somedata, 12)));
    ASSERT_NO_THROW(table.AddFieldRecord(sp1, "Field2", 100, computeChecksum(somedata, 16)));

    //Generate table JSON
    JSONNode tableNode = table.TableToJSON();
    ASSERT_EQ(JSON_ARRAY, tableNode.type());
    ASSERT_EQ(2, tableNode.size());

    // Check first savepoint
    ASSERT_EQ(std::string("FastWavesUnittest.Divergence-in"), tableNode[0]["__name"].as_string());
    ASSERT_EQ(0, tableNode[0]["__id"].as_int());
    ASSERT_EQ(1, tableNode[0]["LargeTimeStep"].as_int());
    ASSERT_EQ(2, tableNode[0]["RKStageNumber"].as_int());
    ASSERT_FALSE(tableNode[0]["ldyn_bbc"].as_bool());
    ASSERT_EQ((int)JSON_ARRAY, (int)tableNode[0]["__offsets"][0].type());
    ASSERT_EQ(std::string("Field1"), tableNode[0]["__offsets"][0].name());
    ASSERT_EQ(0, tableNode[0]["__offsets"][0][0].as_int());
    ASSERT_EQ(computeChecksum(somedata, 4), tableNode[0]["__offsets"][0][1].as_string());
    ASSERT_EQ(0, tableNode[0]["__offsets"][1][0].as_int());
    ASSERT_EQ(computeChecksum(somedata, 8), tableNode[0]["__offsets"][1][1].as_string());

    // Check second savepoint
    ASSERT_EQ(std::string("DycoreUnittest.DoStep-out"), tableNode[1]["__name"].as_string());
    ASSERT_EQ(1, tableNode[1]["__id"].as_int());
    ASSERT_EQ(2, tableNode[1]["LargeTimeStep"].as_int());
    ASSERT_EQ(0.5, tableNode[1]["hd"].as_float());
    ASSERT_EQ((int)JSON_ARRAY, (int)tableNode[1]["__offsets"][0].type());
    ASSERT_EQ(std::string("Field1"), tableNode[1]["__offsets"][0].name());
    ASSERT_EQ(100, tableNode[1]["__offsets"][0][0].as_int());
    ASSERT_EQ(computeChecksum(somedata, 12), tableNode[1]["__offsets"][0][1].as_string());
    ASSERT_EQ(100, tableNode[1]["__offsets"][1][0].as_int());
    ASSERT_EQ(computeChecksum(somedata, 16), tableNode[1]["__offsets"][1][1].as_string());

    // Interpret JSON for table
    OffsetTable table2;
    table2.TableFromJSON(tableNode);

    // Check savepoints
    std::vector<Savepoint> const & sp = GetSavepoints(table2);
    ASSERT_EQ(2, sp.size());
    ASSERT_EQ(sp0, sp[0]);
    ASSERT_EQ(sp1, sp[1]);
    ASSERT_EQ(0, table2.GetSavepointID(sp[0]));
    ASSERT_EQ(1, table2.GetSavepointID(sp[1]));

    // Check methods
    ASSERT_EQ(  0, table2.GetOffset(sp0, "Field1"));
    ASSERT_EQ(  0, table2.GetOffset(sp0, "Field2"));
    ASSERT_EQ(100, table2.GetOffset(sp1, "Field1"));
    ASSERT_EQ(100, table2.GetOffset(sp1, "Field2"));
    ASSERT_EQ(  0, table2.AlreadySerialized("Field1", computeChecksum(somedata, 4)));
    ASSERT_EQ(100, table2.AlreadySerialized("Field1", computeChecksum(somedata, 12)));
    ASSERT_EQ(  0, table2.AlreadySerialized("Field2", computeChecksum(somedata, 8)));
    ASSERT_EQ(100, table2.AlreadySerialized("Field2", computeChecksum(somedata, 16)));
    ASSERT_EQ( -1, table2.AlreadySerialized("Field1", computeChecksum(somedata, 8)));
    ASSERT_EQ( -1, table2.AlreadySerialized("Field2", computeChecksum(somedata, 4)));
}
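Reconstructed from the assertions above, a single savepoint entry of the table JSON looks roughly like this (checksum strings elided; shown only to illustrate the layout):
{
    "__name": "FastWavesUnittest.Divergence-in",
    "__id": 0,
    "LargeTimeStep": 1,
    "RKStageNumber": 2,
    "ldyn_bbc": false,
    "__offsets": {
        "Field1": [ 0, "<checksum>" ],
        "Field2": [ 0, "<checksum>" ]
    }
}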
Example #7
0
TEST_F(FieldsTableUnittest, Serialize)
{
    // Filling some info
    DataFieldInfo info1, info2;

    info1.Init(
            "Field1", "double", 8, 3,
            38, 24, 60, 1,
            3, 3, 3, 3, 0, 0, 0, 0);
    info1.AddMetainfo("ADV", true);

    info2.Init(
            "Field2", "double", 8, 3,
            38, 24, 61, 1,
            3, 3, 3, 3, 0, 1, 0, 0);
    info2.AddMetainfo("Init", 7.4);

    // Registering fields into table
    table.RegisterField(info1);
    table.RegisterField(info2);

    // Checking table
    ASSERT_EQ(2, getTableSize());
    ASSERT_TRUE(table.HasField("Field1"));
    ASSERT_TRUE(table.HasField("Field2"));
    ASSERT_FALSE(table.HasField("Field3"));

    ASSERT_TRUE(info1 == table.Find("Field1"));
    ASSERT_TRUE(info2 == table.Find("Field2"));

    JSONNode node = table.TableToJSON();

    ASSERT_EQ(node.name(), "FieldsTable");
    ASSERT_EQ(node.type(), JSON_ARRAY);
    ASSERT_EQ(node.size(), 2);

    // Check first field info
    ASSERT_EQ(node[0].name(), "DataFieldInfo");
    ASSERT_EQ(node[0]["__name"].as_string(), "Field1");
    ASSERT_EQ(node[0]["__id"].as_int(), 0);
    ASSERT_EQ(node[0]["__isize"].as_int(), 38);
    ASSERT_EQ(node[0]["__jsize"].as_int(), 24);
    ASSERT_EQ(node[0]["__ksize"].as_int(), 60);
    ASSERT_EQ(node[0]["__iminushalosize"].as_int(), 3);
    ASSERT_EQ(node[0]["__iplushalosize"].as_int(), 3);
    ASSERT_EQ(node[0]["__jminushalosize"].as_int(), 3);
    ASSERT_EQ(node[0]["__jplushalosize"].as_int(), 3);
    ASSERT_EQ(node[0]["__kminushalosize"].as_int(), 0);
    ASSERT_EQ(node[0]["__kplushalosize"].as_int(), 0);
    ASSERT_EQ(node[0]["ADV"].as_bool(), true);

    // Check second field info
    ASSERT_EQ(node[1].name(), "DataFieldInfo");
    ASSERT_EQ(node[1]["__name"].as_string(), "Field2");
    ASSERT_EQ(node[1]["__id"].as_int(), 1);
    ASSERT_EQ(node[1]["__isize"].as_int(), 38);
    ASSERT_EQ(node[1]["__jsize"].as_int(), 24);
    ASSERT_EQ(node[1]["__ksize"].as_int(), 61);
    ASSERT_EQ(node[1]["__iminushalosize"].as_int(), 3);
    ASSERT_EQ(node[1]["__iplushalosize"].as_int(), 3);
    ASSERT_EQ(node[1]["__jminushalosize"].as_int(), 3);
    ASSERT_EQ(node[1]["__jplushalosize"].as_int(), 3);
    ASSERT_EQ(node[1]["__kminushalosize"].as_int(), 0);
    ASSERT_EQ(node[1]["__kplushalosize"].as_int(), 1);
    ASSERT_EQ(node[1]["Init"].as_float(), 7.4);
}
Example #8
0
bool JSONReader::parseMetadata(const std::string& text,
    const std::vector<std::shared_ptr<MetadataSchema>>& schemas,
    std::vector<std::shared_ptr<MetadataInternal>>& metadata)
{
    if(text.empty())
    {
        VMF_LOG_ERROR("Empty input JSON string");
        return false;
    }

    metadata.clear();

    JSONNode root;
    try
    {
        root = libjson::parse(text);
    }
    catch(...)
    {
        VMF_LOG_ERROR("Can't get JSON root");
        return false;
    }

    if(root.size() != 1)
    {
        VMF_LOG_ERROR("More than one JSON root");
        return false;
    }

    JSONNode localRootNode = root[0];

    if( localRootNode.name() == TAG_METADATA )
    {
        try
        {
            std::shared_ptr<MetadataInternal> spMetadata = parseMetadataFromNode(localRootNode, schemas);
            metadata.push_back(spMetadata);
        }
        catch(Exception& e)
        {
            VMF_LOG_ERROR("Exception: %s", e.what());
            return false;
        }
    }
    else if( localRootNode.name() == TAG_METADATA_ARRAY )
    {
        for(auto node = localRootNode.begin(); node != localRootNode.end(); ++node)
        {
            try
            {
                std::shared_ptr<MetadataInternal> spMetadata = parseMetadataFromNode(*node, schemas);
                metadata.push_back(spMetadata);
            }
            catch(Exception& e)
            {
                VMF_LOG_ERROR("Exception: %s", e.what());
                return false;
            }
        }
    }
    else if( localRootNode.name() == TAG_VMF )
    {
        for(auto rootChildNode = localRootNode.begin(); rootChildNode != localRootNode.end(); ++rootChildNode)
        {
            if(rootChildNode->name() != TAG_METADATA_ARRAY)
                continue;
            for(auto node = rootChildNode->begin(); node != rootChildNode->end(); ++node)
            {
                try
                {
                    std::shared_ptr<MetadataInternal> spMetadata = parseMetadataFromNode(*node, schemas);
                    metadata.push_back(spMetadata);
                }
                catch(Exception& e)
                {
                    VMF_LOG_ERROR("Exception: %s", e.what());
                    return false;
                }
            }
        }
    }

    return true;
}
Example #9
0
const map<uint64_t, set<string> > SLFKCondNode::match(const Model &m, ModelGetter *getter)
{
    map<uint64_t, set<string> > ret;
    if(m.attr<bool>("deleted")) return ret;
    //uint64_t fn = m.fullname();
    uint64_t gn = m.attr<uint64_t>(*operands[1].as_str());
    string type = m.type();

    string attr = *operands[3].as_str();
    JSONNode v = m.attr<JSONNode>(attr);
    if(v.type()==JSON_NODE && v.size()<=0){
        return ret;
    }

    if(!TypeConfig::is_subtype(type, *operands[0].as_str())){
        // this should never happen
        return ret;
    }

    if(TypeConfig::type_id(*operands[0].as_str())==0){
        gn = FULLNAME_SET_CAT(gn);
    }

    if(this->has_semantic_each()){
        switch(operands[4].as_coop_type()){
        case COND_OPER::EQ:
            {
                HQLOperand o = HQLOperand(v);
                SLFKCondNode n = SLFKCondNode(type,
                                              *operands[1].as_str(),
                                              etype,
                                              attr,
                                              operands[4],
                                              o);
                ret[gn].insert(n.cache_key());
                break;
            }
        case COND_OPER::CONTAINS:
            {
                string sep = operands[5].as_str_array()->at(1);
                string val = v.as_string();
                string unit;
                string::size_type start = 0, end = 0;
                while(end!=string::npos){
                    end = val.find_first_of(sep, start);
                    if(end==string::npos){
                        unit = val.substr(start);
                    }else{
                        unit = val.substr(start, end-start);
                    }
                    start = end + 1;
                    if(unit.size()<1) continue;
                    vector<string> *arg = new vector<string>;//({unit, sep});
                    arg->push_back("\"" + unit + "\"");
                    arg->push_back(sep);
                    HQLOperand o = HQLOperand(arg, HQLOperand::CONTAINS_ARG);

                    SLFKCondNode n = SLFKCondNode(type,
                                                  *operands[1].as_str(),
                                                  etype,
                                                  attr,
                                                  operands[4],
                                                  o);
                    ret[gn].insert(n.cache_key());
                }
                break;
            }
        default:
            {
                // this should never happen
                return ret;
            }
        }
    }else{
        if(operands[4].predicate(operands[5], v)){
            ret[gn].insert(this->cache_key());
        }
    }

    return ret;
}