void ConfigVisitor::Visit(const json::Array& array) {
    if (array.empty()) {
        Error<OptionJsonValueArray>("Cannot infer the type of an empty array");
        return;
    }

    json::Object const& front = array.front();
    if (front.size() != 1) {
        Error<OptionJsonValueArray>("Invalid array member");
        return;
    }

    const std::string& array_type = front.begin()->first;

    if (array_type == "string")
        AddOptionValue(ReadArray(array, array_type, &OptionValueListString::SetListString));
    else if (array_type == "int")
        AddOptionValue(ReadArray(array, array_type, &OptionValueListInt::SetListInt));
    else if (array_type == "double")
        AddOptionValue(ReadArray(array, array_type, &OptionValueListDouble::SetListDouble));
    else if (array_type == "bool")
        AddOptionValue(ReadArray(array, array_type, &OptionValueListBool::SetListBool));
    else if (array_type == "color")
        AddOptionValue(ReadArray(array, array_type, &OptionValueListColor::SetListColor));
    else
        Error<OptionJsonValueArray>("Array type not handled");
}
/// @brief Load MRU Lists.
/// @param key List name.
/// @param array json::Array of values.
void MRUManager::Load(std::string const& key, const json::Array& array) {
    try {
        copy(array.begin(), array.end(), back_inserter(mru[key]));
    }
    catch (json::Exception const&) {
        // Out of date MRU file; just discard the data and skip it
    }
    Prune(key, mru[key]);
}
SEXP create(const json::Array& value, Protect* pProtect) {
    // create the list
    SEXP listSEXP;
    pProtect->add(listSEXP = Rf_allocVector(VECSXP, value.size()));

    // add each array element to it
    for (json::Array::size_type i = 0; i < value.size(); i++) {
        SEXP valueSEXP = create(value[i], pProtect);
        SET_VECTOR_ELT(listSEXP, i, valueSEXP);
    }

    return listSEXP;
}
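// Illustrative only: a minimal sketch of how the array-to-R conversion above might be
// driven. The Protect construction (assumed to be an RAII helper whose destructor
// releases everything registered via add()) and the surrounding function are
// assumptions; only create(const json::Array&, Protect*) comes from the snippet above.
void exampleConvert(const json::Array& array) {
    Protect protect;                          // assumed RAII protection helper
    SEXP listSEXP = create(array, &protect);  // VECSXP with one element per JSON entry
    // listSEXP stays protected for the lifetime of 'protect'
    (void)listSEXP;
}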
//------------------------------------------------------------------------------------------------------------------
bool Serializer::push(const Json::Array& _array, ostream& _oStream, size_t _tab) {
    _oStream << "[\n"; // Open braces

    // Push elements
    for (size_t i = 0; i < _array.size(); ++i) {
        if (!push(*_array[i], _oStream, _tab + 1))
            return false; // Error processing element
        if (i != _array.size() - 1) // All elements but the last one
            _oStream << ',';
        _oStream << '\n';
    }

    // Close braces
    tabify(_oStream, _tab);
    _oStream << ']';
    return true;
}
// extract a set of FilePath objects from a list of home-path-relative strings
Error extractFilePaths(const json::Array& files, std::vector<FilePath>* pFilePaths) {
    for (json::Array::const_iterator it = files.begin(); it != files.end(); ++it) {
        if (it->type() != json::StringType)
            return Error(json::errc::ParamTypeMismatch, ERROR_LOCATION);

        std::string file = it->get_str();
        pFilePaths->push_back(module_context::resolveAliasedPath(file));
    }

    return Success();
}
void getArchiveSettings(JSON::Object &obj, IDatabase *db, int clientid) {
    IQuery *q_get = db->Prepare("SELECT value FROM settings_db.settings WHERE clientid=" + nconvert(clientid) + " AND key=?");
    q_get->Bind("overwrite");
    db_results res = q_get->Read();
    q_get->Reset();
    if (res.empty() || res[0][L"value"] != L"true")
        clientid = 0;

    q_get->Bind("overwrite_archive_settings");
    res = q_get->Read();
    if (res.empty() || res[0][L"value"] != L"true")
        clientid = 0;

    // archive_window is part of the select list because it is read into the JSON object below
    IQuery *q = db->Prepare("SELECT next_archival, interval, interval_unit, length, length_unit, backup_types, archive_window FROM settings_db.automatic_archival WHERE clientid=?");
    q->Bind(clientid);
    res = q->Read();

    JSON::Array arr;
    for (size_t i = 0; i < res.size(); ++i) {
        _i64 archive_next = watoi64(res[i][L"next_archival"]);

        JSON::Object ca;
        ca.set("next_archival", res[i][L"next_archival"]);
        ca.set("archive_every", watoi(res[i][L"interval"]));
        ca.set("archive_every_unit", res[i][L"interval_unit"]);
        ca.set("archive_for", watoi(res[i][L"length"]));
        ca.set("archive_for_unit", res[i][L"length_unit"]);
        ca.set("archive_backup_type", ServerAutomaticArchive::getBackupType(watoi(res[i][L"backup_types"])));
        ca.set("archive_window", res[i][L"archive_window"]);

        if (archive_next > 0 && clientid != 0) {
            _i64 tl = archive_next - (_i64)Server->getTimeSeconds();
            ca.set("archive_timeleft", tl);
        }
        else {
            ca.set("archive_timeleft", "-");
        }

        arr.add(ca);
    }
    obj.set("archive_settings", arr);
}
inline core::Error readParam(const json::Array& params, unsigned int index, json::Value* pValue) {
    if (index >= params.size())
        return core::Error(errc::ParamMissing, ERROR_LOCATION);

    *pValue = params[index];
    return Success();
}
Error setChunkDefs(const std::string& docPath, const std::string& docId,
                   std::time_t docTime, const json::Array& newDefs) {
    // create JSON object wrapping
    json::Object chunkDefs;
    chunkDefs[kChunkDefs] = newDefs;
    chunkDefs[kChunkDocWriteTime] = static_cast<boost::int64_t>(docTime);

    // ensure we have a place to write the sidecar file
    FilePath defFile = chunkDefinitionsPath(docPath, docId, notebookCtxId());

    // if there are no old chunk definitions and we aren't adding any new ones,
    // no work to do
    if (!defFile.exists() && newDefs.size() < 1)
        return Success();

    // we're going to write something; make sure the parent folder exists
    Error error = defFile.parent().ensureDirectory();
    if (error)
        return error;

    // get the old set of chunk IDs so we can clean up any not in the new set
    // of chunks
    std::vector<std::string> chunkIds;
    json::Value oldDefs;
    std::string oldContent;
    error = getChunkDefs(docPath, docId, notebookCtxId(), NULL, &oldDefs);
    if (error)
        LOG_ERROR(error);
    else if (oldDefs.type() == json::ArrayType) {
        if (oldDefs.get_array() == newDefs) {
            // definitions not changing; no work to do
            return Success();
        }
        cleanChunks(chunkCacheFolder(docPath, docId), oldDefs.get_array(), newDefs);
    }

    std::ostringstream oss;
    json::write(chunkDefs, oss);

    error = writeStringToFile(defFile, oss.str());
    if (error) {
        LOG_ERROR(error);
        return error;
    }

    return Success();
}
// loads the dungeon for the given level in the initial state;
// the regions array (read from file) describes how the level should look
bool CDungeon::Load(const json::Array& regions, int level) {
    _level = level;

    json::Array::const_iterator iter(regions.Begin());
    json::Array::const_iterator iterEnd(regions.End());
    for (; iter != iterEnd; ++iter) {
        const json::Object& region = *iter;
        const json::Number& posX = region["X"];
        const json::Number& posY = region["Y"];
        int regionX = posX;
        int regionY = posY;

        if (!_regions[regionX][regionY].Load(region))
            return false;
    }
    return true;
}
template <typename T>
core::Error readParam(const json::Array& params, unsigned int index, T* pValue) {
    if (index >= params.size())
        return core::Error(errc::ParamMissing, ERROR_LOCATION);

    if (!isType<T>(params[index]))
        return core::Error(errc::ParamTypeMismatch, ERROR_LOCATION);

    *pValue = params[index].get_value<T>();
    return Success();
}
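// Illustrative only: a minimal sketch of how the two readParam overloads above might be
// used together to unpack positional JSON-RPC arguments. The surrounding handler
// function, the parameter names, and the assumption that std::string works with
// isType<T>/get_value<T> are hypothetical; only the readParam signatures come from
// the snippets above.
core::Error exampleHandler(const json::Array& params) {
    std::string path;     // hypothetical first argument (templated overload)
    json::Value options;  // hypothetical second argument (json::Value overload)

    core::Error error = readParam(params, 0, &path);
    if (error)
        return error;

    error = readParam(params, 1, &options);
    if (error)
        return error;

    // ... act on path and options ...
    return Success();
}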
static json::Object createFileLocation(const FileEntry &FE, json::Array &Files) {
    std::string FileURI = fileNameToURI(getFileName(FE));

    // See if the Files array contains this URI already. If it does not, create
    // a new file object to add to the array.
    auto I = llvm::find_if(Files, [&](const json::Value &File) {
        if (const json::Object *Obj = File.getAsObject()) {
            if (const json::Object *FileLoc = Obj->getObject("fileLocation")) {
                Optional<StringRef> URI = FileLoc->getString("uri");
                return URI && URI->equals(FileURI);
            }
        }
        return false;
    });

    // Calculate the index within the file location array so it can be stored in
    // the JSON object.
    auto Index = static_cast<unsigned>(std::distance(Files.begin(), I));
    if (I == Files.end())
        Files.push_back(createFile(FE));

    return json::Object{{"uri", FileURI}, {"fileIndex", Index}};
}
template <class OptionValueType, class ValueType>
std::unique_ptr<OptionValue> ConfigVisitor::ReadArray(json::Array const& src, std::string const& array_type, void (OptionValueType::*)(const std::vector<ValueType>&)) {
    std::vector<ValueType> arr;
    arr.reserve(src.size());

    for (json::Object const& obj : src) {
        if (obj.size() != 1) {
            Error<OptionJsonValueArray>("Invalid array member");
            return nullptr;
        }
        if (obj.begin()->first != array_type) {
            Error<OptionJsonValueArray>("Attempt to insert value into array of wrong type");
            return nullptr;
        }
        arr.push_back(ValueType(obj.begin()->second));
    }

    return util::make_unique<OptionValueType>(name, arr);
}
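// Illustrative only: the JSON shape that the Visit/ReadArray pair above accepts, inferred
// from the checks in the code (every array element must be a single-key object, and the
// key must name the same primitive type throughout). The option name and values below
// are made-up placeholders.
//
//   "Example/Recent Files" : [
//       { "string" : "first.ass" },
//       { "string" : "second.ass" }
//   ]
//
// A mixed array such as [ { "string" : "a" }, { "int" : 1 } ] would take the
// "Attempt to insert value into array of wrong type" error path in ReadArray.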
void Scene::parseObjects(const json::Array &arr) {
    for (int i = 0; i < arr.size(); i++) {
        json::Object o = arr[i].get_obj();
        string type = get_str(o, "type");

        // create an object based on the object string
        if (type == "plane") {
            int idx = getMaterialIdx(get_str(o, "material"));
            Vector n = get_vector(get_value(o, "normal").get_array());
            float dist = get_float(o, "distance");
            m_shapes.push_back(new Plane(idx, n, dist));
        }
        if (type == "sphere") {
            Vector v = get_vector(get_value(o, "center").get_array());
            float rad = get_float(o, "radius");
            int idx = getMaterialIdx(get_str(o, "material"));
            m_shapes.push_back(new Sphere(idx, v, rad));
        }
    }
}
// retrieve all documents of a class
int ElasticSearch::fullScan(const std::string& index, const std::string& type, const std::string& query, Json::Array& resultArray, int scrollSize) {
    // Get the scroll id
    std::stringstream scrollUrl;
    scrollUrl << index << "/" << type << "/_search?search_type=scan&scroll=10m&size=" << scrollSize;

    Json::Object scrollObject;
    _http.post(scrollUrl.str().c_str(), query.c_str(), &scrollObject);

    if (!scrollObject.member("hits"))
        EXCEPTION("Result corrupted, no member \"hits\".");

    if (!scrollObject.getValue("hits").getObject().member("total"))
        EXCEPTION("Result corrupted, no member \"total\" nested in \"hits\".");

    int total = scrollObject.getValue("hits").getObject().getValue("total").getInt();
    std::string scrollId = scrollObject["_scroll_id"].getString();

    int count = 0;
    while (count < total) {
        Json::Object result;
        _http.rawpost("_search/scroll?scroll=10m", scrollId.c_str(), &result);

        // Keep the new scroll id we received to inject in the next iteration.
        scrollId = result["_scroll_id"].getString();

        for (const Json::Value& value : result["hits"].getObject()["hits"].getArray()) {
            resultArray.addElement(value);
            ++count;
        }
    }

    if (count != total)
        EXCEPTION("Result corrupted, total is different from count.");

    return total;
}
static void foreach_exif_entry(ExifEntry * entry, void * _closure) {
    if (!entry) {
        return;
    }

    //.........................................................................
    // Bail out of types we don't handle

    switch (entry->format) {
        case EXIF_FORMAT_UNDEFINED:
        case EXIF_FORMAT_FLOAT:
        case EXIF_FORMAT_DOUBLE:
            return;

        default:
            break;
    }

    //.........................................................................

    unsigned char component_size = exif_format_get_size(entry->format);
    ExifIfd ifd = exif_content_get_ifd(entry->parent);
    const char * tag_name = exif_tag_get_name_in_ifd(entry->tag, ifd);

    if (!tag_name || !entry->data || !entry->size || !component_size || !entry->components) {
        return;
    }

    //.........................................................................
    // Add a prefix based on the IFD

    String name(tag_name);

    switch (ifd) {
        case EXIF_IFD_0:                name = "IMAGE/" + name;     break;
        case EXIF_IFD_1:                name = "THUMBNAIL/" + name; break;
        case EXIF_IFD_EXIF:             name = "EXIF/" + name;      break;
        case EXIF_IFD_GPS:              name = "GPS/" + name;       break;
        case EXIF_IFD_INTEROPERABILITY: name = "INTEROP/" + name;   break;
        default:                        return;
    }

    ExifClosure * closure = (ExifClosure *) _closure;
    JSON::Object * tags = closure->tags;

    //.........................................................................
    // ASCII ones are easy

    if (entry->format == EXIF_FORMAT_ASCII) {
        (*tags)[name] = String((const char *) entry->data, entry->size);
        return;
    }

    //.........................................................................

    if ((entry->components * component_size) != entry->size) {
        return;
    }

    ExifByteOrder byte_order = exif_data_get_byte_order(closure->exif_data);
    const unsigned char * data = entry->data;

    JSON::Array array;

    for (unsigned long i = 0; i < entry->components; ++i) {
        switch (entry->format) {
            case EXIF_FORMAT_BYTE:
                array.append(JSON::Value(int(*data)));
                break;

            case EXIF_FORMAT_SHORT:
                array.append(JSON::Value(int(exif_get_short(data, byte_order))));
                break;

            case EXIF_FORMAT_LONG:
                array.append(JSON::Value(int(exif_get_long(data, byte_order))));
                break;

            case EXIF_FORMAT_SBYTE:
                array.append(JSON::Value(int(*((const char *) data))));
                break;

            case EXIF_FORMAT_SSHORT:
                array.append(JSON::Value(exif_get_sshort(data, byte_order)));
                break;

            case EXIF_FORMAT_SLONG:
                array.append(JSON::Value(exif_get_slong(data, byte_order)));
                break;

            // TODO: I don't like representing a rational number as a string with a slash,
            case EXIF_FORMAT_SRATIONAL: {
                ExifSRational r = exif_get_srational(data, byte_order);
                array.append(Util::format("%ld/%ld", r.numerator, r.denominator));
                break;
            }

            case EXIF_FORMAT_RATIONAL: {
                ExifRational r = exif_get_rational(data, byte_order);
                array.append(Util::format("%lu/%lu", r.numerator, r.denominator));
                break;
            }

            default:
                break;
        }

        data += component_size;
    }

    if (array.size() == 1) {
        (*tags)[name] = array[0];
    }
    else if (array.size() > 1) {
        (*tags)[name] = array;
    }
}
bool Item::doProcessSearchResult(const QByteArray &response, int &newPagesCount) {
    QByteArray resp = response;
    QBuffer buffer(&resp);
    buffer.open(QIODevice::ReadOnly);

    try {
        JSON::Reader reader(&buffer);
        JSON::Object *rootObject, *object;
        JSON::Array *array;
        JSON::Value *value;

        rootObject = dynamic_cast<JSON::Object *> (reader.element());
        if (!rootObject) {
            qDebug() << metaObject()->className() << ": cannot find root object";
            return false;
        }

        if ((value = dynamic_cast<JSON::Value *> (rootObject->value("responseDetails")))
            && !value->toString().compare("out of range start")) {
            newPagesCount = 1;
            return false;
        }

        rootObject = dynamic_cast<JSON::Object *> (rootObject->value("responseData"));
        if (!rootObject) {
            qDebug() << metaObject()->className() << ": cannot find responseData object";
            return false;
        }

        array = dynamic_cast<JSON::Array *> (rootObject->value("results"));
        if (!array || !array->size()) {
            qDebug() << metaObject()->className() << ": cannot find photos array";
            return false;
        }

        object = dynamic_cast<JSON::Object *> (array->at(0));
        if (!object) {
            qDebug() << metaObject()->className() << ": cannot find object inside array";
            return false;
        }

        value = dynamic_cast<JSON::Value *> (object->value("imageId"));
        if (value)
            _photoId = value->toString();
        else
            qDebug() << metaObject()->className() << ": cannot find imageId";

        value = dynamic_cast<JSON::Value *> (object->value("url"));
        if (value)
            _photoUrl = value->toString();
        else {
            value = dynamic_cast<JSON::Value *> (object->value("unescapedUrl"));
            if (value)
                _photoUrl = value->toString();
            else
                qDebug() << metaObject()->className() << ": cannot find url/unescapedUrl";
        }

        value = dynamic_cast<JSON::Value *> (object->value("originalContextUrl"));
        if (value)
            _sourceUrl = value->toString();
        else
            qDebug() << metaObject()->className() << ": cannot find originalContextUrl";

        value = dynamic_cast<JSON::Value *> (object->value("title"));
        if (value)
            photoTitle = value->toString();
        else {
            value = dynamic_cast<JSON::Value *> (object->value("titleNoFormatting"));
            if (value)
                photoTitle = value->toString();
            else
                qDebug() << metaObject()->className() << ": cannot find title/titleNoFormatting";
        }

        value = dynamic_cast<JSON::Value *> (object->value("width"));
        if (value)
            photoSize.setWidth(value->toInt());
        else
            qDebug() << metaObject()->className() << ": cannot find width";

        value = dynamic_cast<JSON::Value *> (object->value("height"));
        if (value)
            photoSize.setHeight(value->toInt());
        else
            qDebug() << metaObject()->className() << ": cannot find height";

        value = dynamic_cast<JSON::Value *> (object->value("content"));
        if (value)
            photoDescription = value->toString();
        else {
            value = dynamic_cast<JSON::Value *> (object->value("contentNoFormatting"));
            if (value)
                photoDescription = value->toString();
            else
                qDebug() << metaObject()->className() << ": cannot find content/contentNoFormatting";
        }

        object = dynamic_cast<JSON::Object *> (rootObject->value("cursor"));
        value = dynamic_cast<JSON::Value *> (object->value("estimatedResultCount"));
        newPagesCount = value->toInt();
    }
    catch (const QString &message) {
        qDebug() << message;
    }

    return (!_photoId.isEmpty() && !_lastPhotoIds.contains(_photoId));
}
static Vector get_vector(const json::Array &a) {
    assert(a.size() == 3);
    return Vector(a[0].get_real(), a[1].get_real(), a[2].get_real());
}
void clear() {
    method.clear();
    params.clear();
    kwparams.clear();
}
/// @brief Load MRU Lists.
/// @param key List name.
/// @param array json::Array of values.
void MRUManager::Load(const std::string &key, const json::Array& array) {
    transform(array.Begin(), array.End(), back_inserter(mru[key]), cast_str);
    Prune(mru[key]);
}
void FBXJSONSerializer::recurse_over_model(fbxsdk_2012_1::KFbxNode *fbx_node, json::Array& meshes_array) {
    KFbxMesh* old_mesh = fbx_node->GetMesh();
    if (old_mesh) {
        KFbxGeometryConverter converter(fbx_node->GetFbxSdkManager());
        KFbxMesh* mesh = converter.TriangulateMesh(old_mesh);
        mesh->ComputeBBox();
        mesh->ComputeVertexNormals();

        Object json_mesh;

        {
            KFbxLayerElementUV* uvs = mesh->GetLayer(0)->GetUVs();
            KFbxLayerElementNormal* normals = mesh->GetLayer(0, KFbxLayerElement::eNORMAL)->GetNormals();

            Array json_mesh_normals;
            Array json_mesh_uvs;
            Array json_mesh_vertices;

            int polygon_count = mesh->GetPolygonCount();
            for (int poly_index = 0; poly_index < polygon_count; poly_index++) {
                for (int vertex_index = 0; vertex_index < mesh->GetPolygonSize(poly_index); vertex_index++) {
                    int vertex_position = mesh->GetPolygonVertex(poly_index, vertex_index);
                    KFbxVector4 vertex = mesh->GetControlPoints()[vertex_position];

                    Number vertex_x = vertex.GetAt(0);
                    json_mesh_vertices.Insert(vertex_x);
                    Number vertex_y = vertex.GetAt(1);
                    json_mesh_vertices.Insert(vertex_y);
                    Number vertex_z = vertex.GetAt(2);
                    json_mesh_vertices.Insert(vertex_z);

                    KFbxVector4 normal = normals->GetDirectArray()[vertex_position];
                    Number normal_x = normal.GetAt(0);
                    json_mesh_normals.Insert(normal_x);
                    Number normal_y = normal.GetAt(1);
                    json_mesh_normals.Insert(normal_y);
                    Number normal_z = normal.GetAt(2);
                    json_mesh_normals.Insert(normal_z);

                    if (uvs) {
                        int mesh_index = mesh->GetTextureUVIndex(poly_index, vertex_index);
                        KFbxVector2 uv = uvs->GetDirectArray().GetAt(mesh_index);

                        Number uv_x = 1.0f - uv[0];
                        json_mesh_uvs.Insert(uv_x); // these are flipped
                        Number uv_y = 1.0f - uv[1];
                        json_mesh_uvs.Insert(uv_y);
                    }
                }
            }

            json_mesh["vertices"] = json_mesh_vertices;
            json_mesh["normals"] = json_mesh_normals;
            json_mesh["uvs"] = json_mesh_uvs;
        }

        {
            fbxDouble3 scale = fbx_node->LclScaling.Get();
            Object json_mesh_scale;
            json_mesh_scale["x"] = Number(scale[0]);
            json_mesh_scale["y"] = Number(scale[1]);
            json_mesh_scale["z"] = Number(scale[2]);
            json_mesh["scale"] = json_mesh_scale;
        }

        {
            fbxDouble3 translation = fbx_node->LclTranslation.Get();
            Object json_mesh_translation;
            json_mesh_translation["x"] = Number(translation[0]);
            json_mesh_translation["y"] = Number(translation[1]);
            json_mesh_translation["z"] = Number(translation[2]);
            json_mesh["translation"] = json_mesh_translation;
        }

        {
            fbxDouble3 rotation = fbx_node->LclRotation.Get();
            Object json_mesh_rotation;
            json_mesh_rotation["x"] = Number(rotation[0]);
            json_mesh_rotation["y"] = Number(rotation[1]);
            json_mesh_rotation["z"] = Number(rotation[2]);
            json_mesh["rotation"] = json_mesh_rotation;
        }

        {
            int material_count = fbx_node->GetMaterialCount();
            Array json_mesh_materials;

            for (int material_index = 0; material_index < material_count; material_index++) {
                KFbxSurfaceMaterial* surface_material = fbx_node->GetMaterial(material_index);

                Object json_material;
                Array json_textures;

                int textureIndex = 0;
                FOR_EACH_TEXTURE(textureIndex) {
                    KFbxProperty property = surface_material->FindProperty(KFbxLayerElement::TEXTURE_CHANNEL_NAMES[textureIndex]);
                    int layered_texture_count = property.GetSrcObjectCount(KFbxTexture::ClassId);

                    for (int layered_texture_index = 0; layered_texture_index < layered_texture_count; ++layered_texture_index) {
                        KFbxTexture* texture = KFbxCast<KFbxTexture>(property.GetSrcObject(KFbxTexture::ClassId, layered_texture_index));
                        if (texture) {
                            KFbxFileTexture *file_texture = KFbxCast<KFbxFileTexture>(texture);
                            if (file_texture) {
                                Object json_texture;
                                json_texture["filename"] = String(file_texture->GetFileName());
                                json_textures.Insert(json_texture);
                            }
                        }
                    }
                }

                json_material["textures"] = json_textures;

                KFbxSurfaceLambert* lambert_material = KFbxCast<KFbxSurfaceLambert>(surface_material);
                if (lambert_material) {
                    Object diffuse;
                    double diffuse_r = lambert_material->Diffuse.Get()[0];
                    diffuse["r"] = Number(diffuse_r);
                    double diffuse_g = lambert_material->Diffuse.Get()[1];
                    diffuse["g"] = Number(diffuse_g);
                    double diffuse_b = lambert_material->Diffuse.Get()[2];
                    diffuse["b"] = Number(diffuse_b);
                    json_material["diffuse"] = diffuse;

                    Object ambient;
                    double ambient_r = lambert_material->Ambient.Get()[0];
                    ambient["r"] = Number(ambient_r);
                    double ambient_g = lambert_material->Ambient.Get()[1];
                    ambient["g"] = Number(ambient_g);
                    double ambient_b = lambert_material->Ambient.Get()[2];
                    ambient["b"] = Number(ambient_b);
                    json_material["ambient"] = ambient;

                    KFbxProperty specular_property = lambert_material->FindProperty("SpecularColor");
                    fbxDouble3 specular_data;
                    specular_property.Get(&specular_data, eDOUBLE3);

                    Object specular;
                    float specular_r = specular_data[0];
                    specular["r"] = Number(specular_r);
                    float specular_g = specular_data[1];
                    specular["g"] = Number(specular_g);
                    float specular_b = specular_data[2];
                    specular["b"] = Number(specular_b);
                    json_material["specular"] = specular;
                }

                json_mesh_materials.Insert(json_material);
            }

            json_mesh["materials"] = json_mesh_materials;
        }

        json_mesh["uv_stride"] = Number(2);
        json_mesh["vertex_stride"] = Number(3);
        json_mesh["normal_stride"] = Number(3);

        meshes_array.Insert(json_mesh);
    }

    for (int j = 0; j < fbx_node->GetChildCount(); j++) {
        recurse_over_model(fbx_node->GetChild(j), meshes_array);
    }
}