static interface::ModuleMeta load_module_meta(const json::Value &v)
{
	interface::ModuleMeta r;
	r.disable_cpp = v.get("disable_cpp").as_boolean();
	r.cxxflags = v.get("cxxflags").as_string();
	r.ldflags = v.get("ldflags").as_string();
	r.cxxflags_windows = v.get("cxxflags_windows").as_string();
	r.ldflags_windows = v.get("ldflags_windows").as_string();
	r.cxxflags_linux = v.get("cxxflags_linux").as_string();
	r.ldflags_linux = v.get("ldflags_linux").as_string();
	const json::Value &deps_v = v.get("dependencies");
	for(unsigned int i = 0; i < deps_v.size(); i++){
		interface::ModuleDependency dep = load_module_dependency(deps_v.at(i));
		r.dependencies.push_back(dep);
	}
	const json::Value &rev_deps_v = v.get("reverse_dependencies");
	for(unsigned int i = 0; i < rev_deps_v.size(); i++){
		interface::ModuleDependency dep = load_module_dependency(rev_deps_v.at(i));
		r.reverse_dependencies.push_back(dep);
	}
	return r;
}
void MadeupWindow::loadPreferences() {
  canvas->makeCurrent();
  try {
    std::ifstream in(config_path);
    Json::Reader reader;
    Json::Value prefs;
    reader.parse(in, prefs);

    int width = prefs.get("window.width", 1200).asUInt();
    int height = prefs.get("window.height", 600).asUInt();
    resize(width, height);

    int x = prefs.get("window.x", -1).asInt();
    int y = prefs.get("window.y", -1).asInt();
    if (x >= 0 && y >= 0) {
      move(x, y);
    }

    float stroke_width = (float) prefs.get("path.stroke.width", renderer->getPathStrokeWidth()).asDouble();
    renderer->setPathStrokeWidth(stroke_width);
    path_stroke_width_spinner->setValue(stroke_width);

    float vertex_size = (float) prefs.get("vertex.size", renderer->getVertexSize()).asDouble();
    renderer->setVertexSize(vertex_size);
    vertex_size_spinner->setValue(vertex_size);

    string font_face = prefs.get("font.face", "Courier New").asString();
    int font_size = prefs.get("font.size", 18).asUInt();
    QFont font;
    font.setFamily(font_face.c_str());
    font.setPointSize(font_size);
    editor->setFont(font);
    console->setFont(font);

    renderer->showHeading(prefs.get("show.heading", renderer->showHeading()).asBool());
    show_heading_checkbox->setChecked(renderer->showHeading());

    renderer->showPath(prefs.get("show.path", renderer->showPath()).asBool());
    show_path_checkbox->setChecked(renderer->showPath());

    renderer->showStops(prefs.get("show.stops", renderer->showStops()).asBool());
    show_stops_checkbox->setChecked(renderer->showStops());

    Json::Value show_axis_node = prefs.get("show.axis", Json::nullValue);
    Json::Value show_grid_node = prefs.get("show.grid", Json::nullValue);
    Json::Value grid_extent_node = prefs.get("grid.extent", Json::nullValue);
    Json::Value grid_spacing_node = prefs.get("grid.spacing", Json::nullValue);
    for (int d = 0; d < 3; ++d) {
      bool show_axis = show_axis_node.get(Json::ArrayIndex(d), renderer->showAxis(d)).asBool();
      renderer->showAxis(d, show_axis);
      show_axis_checkboxes[d]->setChecked(renderer->showAxis(d));

      bool show_grid = show_grid_node.get(Json::ArrayIndex(d), renderer->showGrid(d)).asBool();
      renderer->showGrid(d, show_grid);
      show_grid_checkboxes[d]->setChecked(renderer->showGrid(d));

      float grid_extent = (float) grid_extent_node.get(Json::ArrayIndex(d), renderer->getGridExtent(d)).asDouble();
      renderer->setGridExtent(d, grid_extent);
      grid_extent_spinners[d]->setValue(renderer->getGridExtent(d));

      float grid_spacing = (float) grid_spacing_node.get(Json::ArrayIndex(d), renderer->getGridSpacing(d)).asDouble();
      renderer->setGridSpacing(d, grid_spacing);
      grid_spacing_spinners[d]->setValue(renderer->getGridSpacing(d));
    }

    // Background color
    Json::Value background_color_node = prefs.get("background.color", Json::nullValue);
    if (!background_color_node.isNull()) {
      td::QVector4<float> color = renderer->getBackgroundColor();
      for (int i = 0; i < 4; ++i) {
        color[i] = (float) background_color_node.get(i, 0.0).asDouble();
      }
      renderer->setBackgroundColor(color);
    }
    QPalette background_color_palette;
    background_color_palette.setColor(QPalette::Button, toQColor(renderer->getBackgroundColor()));
    background_color_button->setPalette(background_color_palette);

    // Path color
    Json::Value path_color_node = prefs.get("path.color", Json::nullValue);
    if (!path_color_node.isNull()) {
      td::QVector4<float> color = renderer->getPathColor();
      for (int i = 0; i < 4; ++i) {
        color[i] = (float) path_color_node.get(i, 0.0).asDouble();
      }
      renderer->setPathColor(color);
    }
    QPalette path_color_palette;
    path_color_palette.setColor(QPalette::Button, toQColor(renderer->getPathColor()));
    path_color_button->setPalette(path_color_palette);

    // Vertex color
    Json::Value vertex_color_node = prefs.get("vertex.color", Json::nullValue);
    if (!vertex_color_node.isNull()) {
      td::QVector4<float> color = renderer->getVertexColor();
      for (int i = 0; i < 4; ++i) {
        color[i] = (float) vertex_color_node.get(i, 0.0).asDouble();
      }
      renderer->setVertexColor(color);
    }
    QPalette vertex_color_palette;
    vertex_color_palette.setColor(QPalette::Button, toQColor(renderer->getVertexColor()));
    vertex_color_button->setPalette(vertex_color_palette);

    // Face style
    int face_style = prefs.get("render.style", renderer->getRenderStyle()).asUInt();
    renderer->setRenderStyle(face_style);
    face_style_picker->setCurrentIndex(renderer->getRenderStyle());

    // Show preferences
    bool show_settings = prefs.get("show.settings", false).asBool();
    action_settings->setChecked(show_settings);

    // Show console
    bool show_console = prefs.get("show.console", true).asBool();
    show_console_checkbox->setChecked(show_console);

    int settings_page = prefs.get("settings.page", 0).asUInt();
    settings_picker->setCurrentIndex(settings_page);

    double autopathify_delay = prefs.get("autopathify.delay", 0.25).asDouble();
    autopathify_delay_spinner->setValue(autopathify_delay);

    float axis_stroke_width = (float) prefs.get("axis.stroke.width", renderer->getAxisStrokeWidth()).asDouble();
    axis_stroke_width_spinner->setValue(axis_stroke_width);

    float grid_stroke_width = (float) prefs.get("grid.stroke.width", renderer->getGridStrokeWidth()).asDouble();
    grid_stroke_width_spinner->setValue(grid_stroke_width);

    bool autopathify = prefs.get("autopathify", true).asBool();
    autopathify_checkbox->setChecked(autopathify);

    console->setVisible(show_console);

    bool faceted = prefs.get("show.faceted", true).asBool();
    faceted_checkbox->setChecked(faceted);

    bool autorotate = prefs.get("autorotate", renderer->hasAutorotate()).asBool();
    has_autorotate_checkbox->setChecked(autorotate);

    bool has_specular = prefs.get("light.has.specular", renderer->hasSpecular()).asBool();
    // TODO renderer->hasSpecular(has_specular);
    has_specular_checkbox->setChecked(renderer->hasSpecular());

    bool is_two_sided = prefs.get("light.two.sided", renderer->isTwoSided()).asBool();
    is_two_sided_checkbox->setChecked(is_two_sided);

    double azimuth_angle = prefs.get("light.azimuth.angle", renderer->getAzimuthAngle()).asDouble();
    azimuth_angle_spinner->setValue(azimuth_angle);

    double elevation_angle = prefs.get("light.elevation.angle", renderer->getElevationAngle()).asDouble();
    elevation_angle_spinner->setValue(elevation_angle);

    double shininess = prefs.get("light.shininess", renderer->getShininess()).asDouble();
    shininess_spinner->setValue(shininess);

    double light_distance_factor = prefs.get("light.distance.factor", renderer->getLightDistanceFactor()).asDouble();
    light_distance_factor_spinner->setValue(light_distance_factor);

    // Horizontal splitter
    Json::Value horizontal_sizes_node = prefs.get("horizontal.splitter.sizes", Json::nullValue);
    if (!horizontal_sizes_node.isNull()) {
      QList<int> sizes;
      sizes.push_back(horizontal_sizes_node.get(Json::ArrayIndex(0), -1).asUInt());
      sizes.push_back(horizontal_sizes_node.get(Json::ArrayIndex(1), -1).asUInt());
      sizes.push_back(horizontal_sizes_node.get(Json::ArrayIndex(2), -1).asUInt());
      horizontal_splitter->setSizes(sizes);
    }

    // Vertical splitter
    Json::Value vertical_sizes_node = prefs.get("vertical.splitter.sizes", Json::nullValue);
    if (!vertical_sizes_node.isNull()) {
      QList<int> sizes;
      sizes.push_back(vertical_sizes_node.get(Json::ArrayIndex(0), -1).asUInt());
      sizes.push_back(vertical_sizes_node.get(Json::ArrayIndex(1), -1).asUInt());
      vertical_splitter->setSizes(sizes);
    }

    last_directory = QString(prefs.get("last.directory", last_directory.toStdString()).asString().c_str());
  } catch (int i) {
  }
  canvas->update();
}
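A minimal sketch (not from the original sources) of how the flat, dot-named preferences document read by loadPreferences() could be produced with the same JSON library. The helper name writeDefaultPreferences and all values are illustrative; only the key names and the per-axis array layout come from the loader above.

#include <fstream>
#include <string>
#include <json/json.h>

static void writeDefaultPreferences(const std::string &config_path) {
  Json::Value prefs;
  prefs["window.width"] = 1200;                 // illustrative defaults only
  prefs["window.height"] = 600;
  prefs["font.face"] = "Courier New";
  prefs["font.size"] = 18;
  prefs["show.console"] = true;
  prefs["autopathify.delay"] = 0.25;
  Json::Value grid_spacing(Json::arrayValue);   // indexed by axis d = 0..2 in the loader
  for (int d = 0; d < 3; ++d) {
    grid_spacing.append(1.0);
  }
  prefs["grid.spacing"] = grid_spacing;
  std::ofstream out(config_path);
  out << Json::StyledWriter().write(prefs);     // written where loadPreferences() reads it
}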
void Physics::parsePrefab(json::Value& val) { const std::string bodyshape = val["shape"].asString(); const std::string bodytype = val["bodytype"].asString(); const bool isBullet = val.get("bullet", 0).asBool(); BodyType type; if(bodytype == "dynamic") type = Dynamic; else if(bodytype == "static") type = Static; else if(bodytype == "kinematic") type = Kinematic; else szerr << "Unsupported body type: " << bodytype << ErrorStream::error; if(bodyshape == "box") { createBox( static_cast<float>(val["size"][0U].asDouble()), static_cast<float>(val["size"][1U].asDouble()), type, isBullet ); } else if(bodyshape == "circle") { createCircle( static_cast<float>(val["radius"].asDouble()), type, isBullet ); } else if(bodyshape == "polygon") { std::vector<sf::Vector2f> vertices; sf::Uint32 count = val["vertices"].size(); for(sf::Uint32 i=0; i < count; i += 2) { vertices.push_back(sf::Vector2f( static_cast<float>(val["vertices"][i+0].asDouble()), static_cast<float>(val["vertices"][i+1].asDouble()) )); } createPolygon(vertices, type, isBullet); } else { szerr << "Unsupported body shape: " << bodyshape << ErrorStream::error; } finalizeBody( static_cast<float>(val["friction"].asDouble()), static_cast<float>(val["restitution"].asDouble()), static_cast<float>(val["density"].asDouble()) ); m_body->SetGravityScale( static_cast<float>(val.get("gravityscale", 1).asDouble()) ); setSpeedLimit( static_cast<float>(val.get("speedlimit", 0.f).asDouble()) ); }
bool cWSSCompact::LoadChunkFromData(const cChunkCoords & a_Chunk, int a_UncompressedSize, const AString & a_Data, cWorld * a_World) { // Crude data integrity check: if (a_UncompressedSize < cChunkDef::BlockDataSize) { LOGWARNING("Chunk [%d, %d] has too short decompressed data (%d bytes out of %d needed), erasing", a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ, a_UncompressedSize, cChunkDef::BlockDataSize ); EraseChunkData(a_Chunk); return false; } // Decompress the data: AString UncompressedData; int errorcode = UncompressString(a_Data.data(), a_Data.size(), UncompressedData, (size_t)a_UncompressedSize); if (errorcode != Z_OK) { LOGERROR("Error %d decompressing data for chunk [%d, %d]", errorcode, a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ ); return false; } if (a_UncompressedSize != (int)UncompressedData.size()) { LOGWARNING("Uncompressed data size differs (exp %d bytes, got " SIZE_T_FMT ") for chunk [%d, %d]", a_UncompressedSize, UncompressedData.size(), a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ ); return false; } cEntityList Entities; cBlockEntityList BlockEntities; bool IsLightValid = false; if (a_UncompressedSize > cChunkDef::BlockDataSize) { Json::Value root; // will contain the root value after parsing. Json::Reader reader; if ( !reader.parse( UncompressedData.data() + cChunkDef::BlockDataSize, root, false ) ) { LOGERROR("Failed to parse trailing JSON in chunk [%d, %d]!", a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ ); } else { LoadEntitiesFromJson(root, Entities, BlockEntities, a_World); IsLightValid = root.get("IsLightValid", false).asBool(); } } BLOCKTYPE * BlockData = (BLOCKTYPE *)UncompressedData.data(); NIBBLETYPE * MetaData = (NIBBLETYPE *)(BlockData + MetaOffset); NIBBLETYPE * BlockLight = (NIBBLETYPE *)(BlockData + LightOffset); NIBBLETYPE * SkyLight = (NIBBLETYPE *)(BlockData + SkyLightOffset); a_World->SetChunkData( a_Chunk.m_ChunkX, a_Chunk.m_ChunkZ, BlockData, MetaData, IsLightValid ? BlockLight : NULL, IsLightValid ? SkyLight : NULL, NULL, NULL, Entities, BlockEntities, false ); return true; }
void GUI::loadConfigInfo() { cout << "Loading Config Info\n"; Json::Value root; Json::Reader reader; ifstream config(configPath.c_str()); //read all of config file. string configContents; if (config) { config.seekg(0, std::ios::end); configContents.resize(config.tellg()); config.seekg(0, std::ios::beg); config.read(&configContents[0], configContents.size()); config.close(); } bool parsed = reader.parse(configContents, root); if(!parsed) return; //load glyph coloring int antB = root["color_map"]["glyph"]["antecedent"].get("blue", 105).asInt(); int antG = root["color_map"]["glyph"]["antecedent"].get("green", 0).asInt(); int antR = root["color_map"]["glyph"]["antecedent"].get("red", 0).asInt(); int consB = root["color_map"]["glyph"]["consequent"].get("blue", 0).asInt(); int consG = root["color_map"]["glyph"]["consequent"].get("green", 255).asInt(); int consR = root["color_map"]["glyph"]["consequent"].get("red", 0).asInt(); int conB = root["color_map"]["glyph"]["connect"].get("blue", 164).asInt(); int conG = root["color_map"]["glyph"]["connect"].get("green", 160).asInt(); int conR = root["color_map"]["glyph"]["connect"].get("red", 160).asInt(); int misB = root["color_map"]["glyph"]["missing"].get("blue", 164).asInt(); int misG = root["color_map"]["glyph"]["missing"].get("green", 0).asInt(); int misR = root["color_map"]["glyph"]["missing"].get("red", 160).asInt(); int ramp = root["color_map"].get("ramp", 2).asInt(); colorMappings->glyphAntecedentColor.setRed(antR); colorMappings->glyphAntecedentColor.setGreen(antG); colorMappings->glyphAntecedentColor.setBlue(antB); colorMappings->glyphConsequentColor.setRed(consR); colorMappings->glyphConsequentColor.setGreen(consG); colorMappings->glyphConsequentColor.setBlue(consB); colorMappings->glyphConnectColor.setRed(conR); colorMappings->glyphConnectColor.setGreen(conG); colorMappings->glyphConnectColor.setBlue(conB); colorMappings->glyphMissingColor.setRed(misR); colorMappings->glyphMissingColor.setGreen(misG); colorMappings->glyphMissingColor.setBlue(misB); colorMappings->setColorRamp(this->colorManage->getPredefinedColorRamp(ramp)); //load graph settings. int granularity = root.get("granularity", 50.0).asDouble(); bool redun = root.get("redundancies", false).asBool(); int ruleMode = root.get("rule_mode", 0).asInt(); bool gridLines = root["graph"].get("grid_lines", true).asBool(); bool skyline = root["graph"].get("skyline", true).asBool(); int skylineCardinality = root["graph"].get("skyline_cardinality_value", 0).asInt(); evCont->setRuleMode(RuleMode(ruleMode)); this->truncateVal = granularity; evCont->updateRedundancy(redun); graphInstance->setConfig(gridLines, skyline, redun, RuleMode(ruleMode), skylineCardinality,granularity); //load default file. string defaultFile = root.get("default_file", "").asString(); bool successfullyLoaded = pInstance->loadFile(defaultFile); if(successfullyLoaded) updateForLoadedIndex(QString::fromStdString(defaultFile)); }
void SkillCharacter::loadFromJson( Json::Value p_json ) { this->setIdSkillCharacter(p_json.get(SKILL_JSON_IDCHARACTERSKILL, -1).asInt64()); this->setLevel(p_json.get(SKILL_JSON_LEVEL, 0).asInt()); this->setSkill(FactoryGet::getSkillFactory()->getSkill(p_json.get(SKILL_JSON_IDSKILL, -1).asInt())); }
/* Method to read properties from file and Initialize to Properties Instance.
 * Parameters are:
 * WIoTP Properties file path.
 * Properties instance to get initialized.
 */
bool IOTP_Client::InitializePropertiesFromFile(const std::string& filePath, Properties& prop) {
  std::string methodName = __func__;
  logger.debug(methodName + " Entry: ");
  bool rc = true;
  Json::Reader reader;
  Json::Value root;
  std::filebuf fb;
  if (fb.open(filePath, std::ios::in)) {
    std::istream is(&fb);
    if (!reader.parse(is, root)) {
      logger.error("Failed to parse configurations from input file: " + filePath);
      fb.close();
      rc = false;
    } else {
      fb.close();
      std::string org = root.get("Organization-ID", "").asString();
      if (org.size() == 0) {
        logger.error("Failed to parse Organization-ID from given configuration.");
        rc = false;
      } else
        prop.setorgId(org);

      std::string domain = root.get("Domain", "").asString();
      if (domain.size() != 0)
        prop.setdomain(domain);

      std::string deviceType = root.get("Device-Type", "").asString();
      if (deviceType.size() == 0) {
        logger.error("Failed to parse Device-Type from given configuration.");
        rc = false;
      } else
        prop.setdeviceType(deviceType);

      std::string deviceId = root.get("Device-ID", "").asString();
      if (deviceId.size() == 0) {
        logger.error("Failed to parse Device-ID from given configuration.");
        rc = false;
      } else
        prop.setdeviceId(deviceId);

      std::string customPort = root.get("Port", "8883").asString();
      if (customPort.size() == 0) {
        logger.error("Failed to parse Port from given configuration.");
        rc = false;
      } else {
        if (prop.getorgId().compare("quickstart") == 0)
          prop.setPort(1883);
        else
          prop.setPort(std::stoi(customPort));
      }

      if (org.compare("quickstart") != 0) {
        std::string username = root.get("Authentication-Method", "").asString();
        if (username.size() == 0) {
          logger.error("Failed to parse username from given configuration.");
          rc = false;
        } else
          prop.setauthMethod(username);

        std::string password = root.get("Authentication-Token", "").asString();
        if (password.size() == 0) {
          logger.error("Failed to parse password from given configuration.");
          rc = false;
        } else
          prop.setauthToken(password);

        std::string trustStore = root.get("clientTrustStorePath", "").asString();
        if (trustStore.size() == 0) {
          logger.error("Failed to parse clientTrustStorePath from given configuration.");
          rc = false;
        } else
          prop.settrustStore(trustStore);

        std::string useCerts = root.get("useClientCertificates", "false").asString();
        if (useCerts.size() == 0) {
          logger.error("Failed to parse useClientCertificates from given configuration.");
          rc = false;
        } else {
          if (useCerts.compare("true") == 0)
            prop.setuseCerts(true);
          else
            prop.setuseCerts(false);
        }

        if (prop.getuseCerts()) {
          std::string keyStore = root.get("clientCertPath", "").asString();
          if (keyStore.size() == 0) {
            logger.error("Failed to parse clientCertPath from given configuration.");
            rc = false;
          } else
            prop.setkeyStore(keyStore);

          std::string privateKey = root.get("clientKeyPath", "").asString();
          if (privateKey.size() == 0) {
            logger.error("Failed to parse clientKeyPath from given configuration.");
            rc = false;
          } else
            prop.setprivateKey(privateKey);

          std::string passPhrase = root.get("clientKeyPassword", "").asString();
          prop.setkeyPassPhrase(passPhrase);
        }
      }
    }
  } else {
    logger.error("Failed to open input file: " + filePath);
    rc = false;
  }
  logger.debug(methodName + " Exit: ");
  return rc;
}
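For reference, a sketch of the properties document that InitializePropertiesFromFile() above expects. The key names are exactly the ones the function queries; every value here is an invented placeholder.

#include <string>

static const std::string kExampleWiotpProperties = R"({
  "Organization-ID": "myorg",
  "Domain": "internetofthings.ibmcloud.com",
  "Device-Type": "sensor",
  "Device-ID": "device001",
  "Port": "8883",
  "Authentication-Method": "token",
  "Authentication-Token": "not-a-real-token",
  "clientTrustStorePath": "IoTFoundation.pem",
  "useClientCertificates": "false"
})";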
//---------------------------------------------------------------------------
void sgStagingCode::Deserialize( Json::Value& root )
{
  _table = root.get("table", "").asString();
  _code = root.get("code", "").asString();
  // Default must be a bool so asBool() remains valid when "is_valid" is absent.
  _isValid = root.get("is_valid", false).asBool();
}
static inline std::string getSourceFromJson(Json::Value const &routingDirective) {
  // Unwrap the stored string; toStyledString() would re-serialise it with quotes
  // and a trailing newline, unlike the destination accessor below.
  return routingDirective.get(routing_keys::source(), "").asString();
}
static inline std::string getDestinationFromJson(Json::Value const &routingDirective) { return routingDirective.get(routing_keys::destination(), "").asString(); }
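These two accessors are easy to confuse: toStyledString() re-serialises a Json::Value (quotes and trailing newline included), while asString() unwraps the stored string. A short sketch, using a hypothetical "source" key, showing the difference:

#include <iostream>
#include <json/json.h>

int main() {
  Json::Value directive;
  directive["source"] = "deviceA";                               // hypothetical routing key
  std::cout << directive.get("source", "").toStyledString();     // prints "deviceA" (quoted, newline appended)
  std::cout << directive.get("source", "").asString() << "\n";   // prints deviceA
  return 0;
}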
void lsd_server_t::process(ev::idle&, int) { if(m_socket.pending()) { zmq::message_t message; route_t route; do { m_socket.recv(&message); if(!message.size()) { break; } route.push_back( std::string( static_cast<const char*>(message.data()), message.size() ) ); } while(m_socket.more()); if(route.empty() || !m_socket.more()) { log().error("got a corrupted request - invalid route"); return; } while(m_socket.more()) { // Receive the envelope. m_socket.recv(&message); // Parse the envelope and setup the job policy. Json::Reader reader(Json::Features::strictMode()); Json::Value root; if(!reader.parse( static_cast<const char*>(message.data()), static_cast<const char*>(message.data()) + message.size(), root)) { log().error( "got a corrupted request - invalid envelope - %s", reader.getFormatedErrorMessages().c_str() ); continue; } client::policy_t policy( root.get("urgent", false).asBool(), root.get("timeout", 0.0f).asDouble(), root.get("deadline", 0.0f).asDouble() ); boost::shared_ptr<lsd_job_t> job; try { job.reset(new lsd_job_t(*this, policy, root.get("uuid", "").asString(), route)); } catch(const std::runtime_error& e) { log().error( "got a corrupted request - invalid envelope - %s", e.what() ); continue; } if(!m_socket.more()) { log().error("got a corrupted request - missing body"); job->process_event(events::error_t(client::request_error, "missing body")); continue; } m_socket.recv(&job->request()); m_engine.enqueue(job); } } else { m_processor.stop(); } }
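A sketch of the request envelope the loop above expects after the route parts. The field names ("uuid", "urgent", "timeout", "deadline") come from the parsing code; the values and the helper name makeExampleEnvelope are placeholders.

#include <string>
#include <json/json.h>

static std::string makeExampleEnvelope() {
  Json::Value envelope;
  envelope["uuid"] = "00000000-0000-0000-0000-000000000000";  // placeholder job id
  envelope["urgent"] = false;
  envelope["timeout"] = 5.0;      // read back via asDouble()
  envelope["deadline"] = 0.0;
  return Json::FastWriter().write(envelope);  // sent as the message preceding the body
}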
void CProtectChildManager::LoadChildOnlineInfo()
{
  try {
    std::string sJsonStr;
    std::string sTemp;
    // Parsing the UTF-8 JSON file in C++ produces garbled text, so the file still has to be ASCII
    std::string sFileName = GetAppPathA() + CHILD_FILE_NAME;
    if (!IsFileExistsA(sFileName.c_str()))
      return;
    ifstream configFile;
    configFile.open(sFileName);
    //--------------------------------
    //--------------------------------
    //--------------------------------
    //-------- read the whole file; testing the extraction itself (rather than eof())
    //-------- avoids the infinite loop this spot used to hit when the stream failed
    while (configFile >> sTemp) {
      sJsonStr.append(sTemp);
    }
    configFile.close();
    Json::Reader reader;
    Json::Value root;
    if (reader.parse(sJsonStr, root)) {
      if (root.isArray()) {
        std::string sCD;
        std::string sTemp;
        Json::Value childItem;
        Json::Value roleListItem;
        Json::Value roleItem;
        POnLineChild pChild = nullptr;
        POnLineRoleName pRoleName = nullptr;
        for (int i = 0; i < (int)root.size(); i++) {
          childItem = root.get(i, nullptr);
          if (childItem == nullptr)
            continue;
          sCD = childItem.get("CD", "").asString();
          if (sCD.compare("") == 0)
            return;
          pChild = new TOnLineChild();
          pChild->sIdentificationID = sCD;
          pChild->iOnLineSecond = childItem.get("OS", 0).asInt();
          pChild->iOnLinePeriod = childItem.get("OP", 0).asInt();
          pChild->uiLogoutTime = childItem.get("LT", 0).asInt64();
          m_OnLineChildren.Add(sCD, pChild);
          roleListItem = childItem.get("RL", nullptr);
          if ((roleListItem != nullptr) && (roleListItem.isArray())) {
            for (int j = 0; j < (int)roleListItem.size(); j++) {
              roleItem = roleListItem.get(j, nullptr);
              if (roleItem == nullptr)
                continue;
              pRoleName = new TOnLineRoleName();
              pRoleName->sRoleName = roleItem.get("RN", "").asString();
              pRoleName->iServerID = roleItem.get("SD", 0).asInt();
              pChild->RoleNameList.push_back(pRoleName);
              ++m_iTotalChildCount;
            }
          }
        }
      }
    }
  }
  catch (...) {
    Log("LoadChildOnlineInfo:", lmtException);
  }
}
bool cAuthenticator::AuthWithYggdrasil(AString & a_UserName, const AString & a_ServerId, AString & a_UUID, Json::Value & a_Properties) { LOGD("Trying to authenticate user %s", a_UserName.c_str()); // Create the GET request: AString ActualAddress = m_Address; ReplaceString(ActualAddress, "%USERNAME%", a_UserName); ReplaceString(ActualAddress, "%SERVERID%", a_ServerId); AString Request; Request += "GET " + ActualAddress + " HTTP/1.0\r\n"; Request += "Host: " + m_Server + "\r\n"; Request += "User-Agent: MCServer\r\n"; Request += "Connection: close\r\n"; Request += "\r\n"; AString Response; if (!SecureGetFromAddress(StarfieldCACert(), m_Server, Request, Response)) { return false; } // Check the HTTP status line: const AString Prefix("HTTP/1.1 200 OK"); AString HexDump; if (Response.compare(0, Prefix.size(), Prefix)) { LOGINFO("User %s failed to auth, bad HTTP status line received", a_UserName.c_str()); LOGD("Response: \n%s", CreateHexDump(HexDump, Response.data(), Response.size(), 16).c_str()); return false; } // Erase the HTTP headers from the response: size_t idxHeadersEnd = Response.find("\r\n\r\n"); if (idxHeadersEnd == AString::npos) { LOGINFO("User %s failed to authenticate, bad HTTP response header received", a_UserName.c_str()); LOGD("Response: \n%s", CreateHexDump(HexDump, Response.data(), Response.size(), 16).c_str()); return false; } Response.erase(0, idxHeadersEnd + 4); // Parse the Json response: if (Response.empty()) { return false; } Json::Value root; Json::Reader reader; if (!reader.parse(Response, root, false)) { LOGWARNING("cAuthenticator: Cannot parse received data (authentication) to JSON!"); return false; } a_UserName = root.get("name", "Unknown").asString(); a_UUID = root.get("id", "").asString(); a_Properties = root["properties"]; // If the UUID doesn't contain the hashes, insert them at the proper places: if (a_UUID.size() == 32) { a_UUID.insert(8, "-"); a_UUID.insert(13, "-"); a_UUID.insert(18, "-"); a_UUID.insert(23, "-"); } return true; }
static void AddAnswer(DicomFindAnswers& answers, const Json::Value& resource, const DicomArray& query, const std::list<DicomTag>& sequencesToReturn, const DicomMap* counters) { DicomMap result; for (size_t i = 0; i < query.GetSize(); i++) { if (query.GetElement(i).GetTag() == DICOM_TAG_QUERY_RETRIEVE_LEVEL) { // Fix issue 30 on Google Code (QR response missing "Query/Retrieve Level" (008,0052)) result.SetValue(query.GetElement(i).GetTag(), query.GetElement(i).GetValue()); } else if (query.GetElement(i).GetTag() == DICOM_TAG_SPECIFIC_CHARACTER_SET) { // Do not include the encoding, this is handled by class ParsedDicomFile } else { std::string tag = query.GetElement(i).GetTag().Format(); std::string value; if (resource.isMember(tag)) { value = resource.get(tag, Json::arrayValue).get("Value", "").asString(); result.SetValue(query.GetElement(i).GetTag(), value, false); } else { result.SetValue(query.GetElement(i).GetTag(), "", false); } } } if (counters != NULL) { DicomArray tmp(*counters); for (size_t i = 0; i < tmp.GetSize(); i++) { result.SetValue(tmp.GetElement(i).GetTag(), tmp.GetElement(i).GetValue().GetContent(), false); } } if (result.GetSize() == 0 && sequencesToReturn.empty()) { LOG(WARNING) << "The C-FIND request does not return any DICOM tag"; } else if (sequencesToReturn.empty()) { answers.Add(result); } else { ParsedDicomFile dicom(result); for (std::list<DicomTag>::const_iterator tag = sequencesToReturn.begin(); tag != sequencesToReturn.end(); ++tag) { const Json::Value& source = resource[tag->Format()]; if (source.type() == Json::objectValue && source.isMember("Type") && source.isMember("Value") && source["Type"].asString() == "Sequence" && source["Value"].type() == Json::arrayValue) { Json::Value content = Json::arrayValue; for (Json::Value::ArrayIndex i = 0; i < source["Value"].size(); i++) { Json::Value item; Toolbox::SimplifyTags(item, source["Value"][i], DicomToJsonFormat_Short); content.append(item); } dicom.Replace(*tag, content, false, DicomReplaceMode_InsertIfAbsent); } } answers.Add(dicom); } }
void Item::deserialize(Json::Value& root) { _type = root.get("type", 0).asInt(); _count = root.get("count", 0).asInt(); _template = ItemTemplate::getTemplate(_type); }
//////////////////////////////////////////////////////////////////////////
// addImport
BcU32 CsPackageImporter::addImport( const Json::Value& Resource, BcBool IsCrossRef )
{
	PSY_LOGSCOPEDINDENT;
	std::lock_guard< std::recursive_mutex > Lock( BuildingLock_ );
	BcAssert( BuildingBeginCount_ > 0 );

	// Validate it's an object.
	BcAssertMsg( Resource.type() == Json::objectValue, "Can't import a value that isn't an object." );

	// Validate name and type.
	Json::Value Name( Resource.get( "name", Json::nullValue ) );
	Json::Value Type( Resource.get( "$Class", Json::nullValue ) );
	if( Type == Json::nullValue )
	{
		Type = Resource.get( "type", Json::nullValue );
	}

	BcAssertMsg( Name.type() == Json::stringValue, "Name not specified for resource.\n" );
	BcAssertMsg( Type.type() == Json::stringValue, "Type not specified for resource.\n" );

	// Check if there is a resource with matching name already, as long as it isn't a cross ref.
	if( !IsCrossRef )
	{
		auto AlreadyExisting = std::find_if( Resources_.begin(), Resources_.end(),
			[ this, &Name ]( const TResourceImport& ResourceImport )
			{
				return ResourceImport.Importer_->getResourceName() == Name.asCString();
			} );
		BcAssertMsg( AlreadyExisting == Resources_.end(),
			"Resource \"%s\" already exists in package \"%s\"", Name.asCString(), (*Name_).c_str() );
	}

	// Grab class, create importer.
	const ReClass* ResourceClass = ReManager::GetClass( Type.asCString() );
	CsResourceImporterUPtr ResourceImporter;
	if( ResourceClass != nullptr )
	{
		CsResourceImporterAttribute* ResourceImporterAttr = nullptr;
		do
		{
			ResourceImporterAttr = ResourceClass->getAttribute< CsResourceImporterAttribute >();

			// Check on a parent to see if there is a valid importer attached to it.
			if( ResourceImporterAttr == nullptr )
			{
				ResourceClass = ResourceClass->getSuper();
			}
		}
		while( ResourceImporterAttr == nullptr && ResourceClass != nullptr );

		if( ResourceImporterAttr != nullptr )
		{
			ResourceImporter = ResourceImporterAttr->getImporter();
		}
	}

	// BcAssertMsg( ResourceImporter != nullptr, "Can't create resource importer for \"%s\"", Type.asCString() );

	// Serialise resource onto importer.
	CsSerialiserPackageObjectCodec ObjectCodec( nullptr, bcRFF_IMPORTER, bcRFF_NONE, bcRFF_IMPORTER );
	SeJsonReader Reader( &ObjectCodec );
	Reader.serialiseClassMembers( ResourceImporter.get(), ResourceImporter->getClass(), Resource, 0 );

	// Add import with importer.
	return addImport( std::move( ResourceImporter ), Resource, IsCrossRef );
}
////////////////////////////////////////////////////////////////////////// // import BcBool CsPackageImporter::import( const BcName& Name ) { Name_ = Name; BcPath Path = CsCore::pImpl()->getPackageImportPath( Name ); PSY_LOGSCOPEDCATEGORY( "Import" ); PSY_LOG( "Importing %s...\n", (*Path).c_str() ); PSY_LOGSCOPEDINDENT; BcTimer TotalTimer; TotalTimer.mark(); // Store source file info. FsStats Stats; if( FsCore::pImpl()->fileStats( (*Path).c_str(), Stats ) ) { Header_.SourceFileStatsHash_ = BcHash( reinterpret_cast< BcU8* >( &Stats ), sizeof( Stats ) ); } else { Header_.SourceFileStatsHash_ = 0; } beginImport(); Header_.SourceFile_ = addString( (*Path).c_str() ); endImport(); Json::Value Root; if( loadJsonFile( (*Path).c_str(), Root ) ) { // Add as dependency. beginImport(); addDependency( (*Path).c_str() ); // Get resource list. Json::Value Resources( Root.get( "resources", Json::Value( Json::arrayValue ) ) ); // Add all package cross refs. addAllPackageCrossRefs( Resources ); // Set resource id to zero. ResourceIds_.store( 0 ); // Import everything. for( const auto& ResourceObject : Resources ) { addImport( ResourceObject, BcFalse ); } endImport(); // Sort importers. std::sort( Resources_.begin(), Resources_.end() ); // Iterate over all resources and import (import calls can append to the list) size_t CurrResourceIdx = 0; while( CurrResourceIdx < Resources_.size() ) { // Grab first resource in the list. auto ResourceEntry = std::move( Resources_[ CurrResourceIdx++ ] ); // Import resource. BcTimer ResourceTimer; ResourceTimer.mark(); try { PSY_LOGSCOPEDINDENT; beginImport(); if( importResource( std::move( ResourceEntry.Importer_ ), ResourceEntry.Resource_ ) ) { PSY_LOG( "SUCCEEDED: Time: %.2f seconds.\n", ResourceTimer.time() ); } else { PSY_LOG( "FAILED: Time: %.2f seconds.\n", ResourceTimer.time() ); BcBreakpoint; endImport(); return BcFalse; } endImport(); } catch( CsImportException ImportException ) { PSY_LOG( "FAILED: Time: %.2f seconds.\n", ResourceTimer.time() ); PSY_LOG( "ERROR: in file %s:\n%s\n", ImportException.file().c_str(), ImportException.what() ); endImport(); return BcFalse; } } // Save and return. BcPath PackedPackage( CsCore::pImpl()->getPackagePackedPath( Name ) ); BcBool SaveSuccess = save( PackedPackage ); if( SaveSuccess ) { PSY_LOG( "SUCCEEDED: Time: %.2f seconds.\n", TotalTimer.time() ); // Write out dependencies. std::string OutputDependencies = *CsCore::pImpl()->getPackageIntermediatePath( Name ) + "/deps.json"; CsSerialiserPackageObjectCodec ObjectCodec( nullptr, (BcU32)bcRFF_ALL, (BcU32)bcRFF_TRANSIENT, 0 ); SeJsonWriter Writer( &ObjectCodec ); Writer << Dependencies_; Writer.save( OutputDependencies.c_str() ); } else { PSY_LOG( "FAILED: Time: %.2f seconds.\n", TotalTimer.time() ); BcBreakpoint; } return SaveSuccess; } return BcFalse; }
/**
 * Simple proof of concept - currently just gets user email/password and
 * returns their token (as provided by Discord) which will be needed later
 * when making any other request of the Discord API
 */
int main(int argc, char * argv[]) {
  CURL *curl;
  CURLcode err;

  // get uname and pw
  std::cout << "Please enter your email: ";
  std::string email = "";
  std::string pass = "";
  getline(std::cin, email);
  std::cout << "Please enter your password: ";
  getline(std::cin, pass);

  // build the login POST body and a buffer for the JSON reply
  std::string post = "email=" + email + "&password=" + pass;
  std::string JSONresult;

  curl = curl_easy_init();
  if (curl) {
    curl_easy_setopt(curl, CURLOPT_URL, "https://discordapp.com/api/auth/login");
    // uname and pw
    curl_easy_setopt(curl, CURLOPT_POSTFIELDS, post.c_str());
    curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, writeString);
    curl_easy_setopt(curl, CURLOPT_WRITEDATA, &JSONresult);

    Json::Reader reader;
    Json::Value result;
    err = curl_easy_perform(curl);
    bool successfulParse = reader.parse(JSONresult, result);
    if (!successfulParse) {
      std::cout << "Parsing error: " << reader.getFormattedErrorMessages();
      exit(0);
    }
    if (err != CURLE_OK) {
      std::cout << "Error: " << curl_easy_strerror(err);
      exit(0);
    }
    std::string token;
    if (result.isMember("token")) {
      token = result.get("token", result).asString();
      std::cout << "\nResult: " << token << "\n";
    } else {
      std::cout << "\nError: No token received, check your email and password!\n";
      return 0;
    }
    curl_easy_cleanup(curl);
  }
  return 0;
}
ShipType::ShipType(const Id &_id, const std::string &path) { Json::Reader reader; Json::Value data; isGlobalColorDefined = false; auto fd = FileSystem::gameDataFiles.ReadFile(path); if (!fd) { Output("couldn't open ship def '%s'\n", path.c_str()); throw ShipTypeLoadError(); } if (!reader.parse(fd->GetData(), fd->GetData()+fd->GetSize(), data)) { Output("couldn't read ship def '%s': %s\n", path.c_str(), reader.getFormattedErrorMessages().c_str()); throw ShipTypeLoadError(); } // determine what kind (tag) of ship this is. const std::string tagStr = data.get("tag", "").asString(); if( tagStr.empty() || strcasecmp(tagStr.c_str(), "ship")==0 ) { tag = TAG_SHIP; } else if( strcasecmp(tagStr.c_str(), "static")==0 ) { tag = TAG_STATIC_SHIP; } else if( strcasecmp(tagStr.c_str(), "missile")==0 ) { tag = TAG_MISSILE; } id = _id; name = data.get("name", "").asString(); shipClass = data.get("ship_class", "").asString(); manufacturer = data.get("manufacturer", "").asString(); modelName = data.get("model", "").asString(); cockpitName = data.get("cockpit", "").asString(); linThrust[THRUSTER_REVERSE] = data.get("reverse_thrust", 0.0f).asFloat(); linThrust[THRUSTER_FORWARD] = data.get("forward_thrust", 0.0f).asFloat(); linThrust[THRUSTER_UP] = data.get("up_thrust", 0.0f).asFloat(); linThrust[THRUSTER_DOWN] = data.get("down_thrust", 0.0f).asFloat(); linThrust[THRUSTER_LEFT] = data.get("left_thrust", 0.0f).asFloat(); linThrust[THRUSTER_RIGHT] = data.get("right_thrust", 0.0f).asFloat(); angThrust = data.get("angular_thrust", 0.0f).asFloat(); // Parse global thrusters color bool error = false; int parse = 0; for( Json::Value::iterator thruster_color = data["thruster_global_color"].begin() ; thruster_color != data["thruster_global_color"].end() ; ++thruster_color ) { const std::string colorchannel = thruster_color.key().asString(); if (colorchannel.length()!=1) { error = true; break; } if (colorchannel.at(0) == 'r') { globalThrusterColor.r = data["thruster_global_color"].get(colorchannel, 0).asInt(); parse++; continue; } else if (colorchannel.at(0) == 'g') { globalThrusterColor.g = data["thruster_global_color"].get(colorchannel, 0).asInt(); parse++; continue; } else if (colorchannel.at(0) == 'b') { globalThrusterColor.b = data["thruster_global_color"].get(colorchannel, 0).asInt(); parse++; continue; } else { // No 'r', no 'g', no 'b', no good :/ error = true; break; } } if (error==true) { Output("In file \"%s.json\" global thrusters custom color must be \"r\",\"g\" and \"b\"\n", modelName.c_str()); throw ShipTypeLoadError(); } else if (parse>0 && parse<3) { Output("In file \"%s.json\" global thrusters custom color is malformed\n", modelName.c_str()); throw ShipTypeLoadError(); } else if (parse==3) { globalThrusterColor.a = 255; isGlobalColorDefined = true; } // Parse direction thrusters color for (int i=0; i<THRUSTER_MAX; i++) isDirectionColorDefined[i]=false; error = false; for( Json::Value::iterator thruster_color = data["thruster_direction_color"].begin() ; thruster_color != data["thruster_direction_color"].end() ; ++thruster_color ) { const std::string th_color_dir = thruster_color.key().asString(); Json::Value dir_color = data["thruster_direction_color"].get(th_color_dir, 0); Color color; if (!dir_color.isMember("r")||!dir_color.isMember("g")||!dir_color.isMember("b")) { error = true; continue /* for */; } else { color.r = dir_color["r"].asInt(); color.g = dir_color["g"].asInt(); color.b = dir_color["b"].asInt(); color.a = 255; } if (th_color_dir.find("forward")!=std::string::npos) { 
isDirectionColorDefined[THRUSTER_FORWARD]=true; directionThrusterColor[THRUSTER_FORWARD]= color; } if (th_color_dir.find("retro")!=std::string::npos) { isDirectionColorDefined[THRUSTER_REVERSE]=true; directionThrusterColor[THRUSTER_REVERSE]= color; } if (th_color_dir.find("left")!=std::string::npos) { isDirectionColorDefined[THRUSTER_LEFT]=true; directionThrusterColor[THRUSTER_LEFT]= color; } if (th_color_dir.find("right")!=std::string::npos) { isDirectionColorDefined[THRUSTER_RIGHT]=true; directionThrusterColor[THRUSTER_RIGHT]= color; } if (th_color_dir.find("up")!=std::string::npos) { isDirectionColorDefined[THRUSTER_UP]=true; directionThrusterColor[THRUSTER_UP]= color; } if (th_color_dir.find("down")!=std::string::npos) { isDirectionColorDefined[THRUSTER_DOWN]=true; directionThrusterColor[THRUSTER_DOWN]= color; } } if (error==true) { for (int i=0; i<THRUSTER_MAX; i++) isDirectionColorDefined[i]=false; Output("In file \"%s.json\" directional thrusters custom color must be \"r\",\"g\" and \"b\"\n", modelName.c_str()); throw ShipTypeLoadError(); } // invert values where necessary linThrust[THRUSTER_FORWARD] *= -1.f; linThrust[THRUSTER_LEFT] *= -1.f; linThrust[THRUSTER_DOWN] *= -1.f; // angthrust fudge (XXX: why?) angThrust = angThrust * 0.5f; hullMass = data.get("hull_mass", 100).asInt(); capacity = data.get("capacity", 0).asInt(); fuelTankMass = data.get("fuel_tank_mass", 5).asInt(); for( Json::Value::iterator slot = data["slots"].begin() ; slot != data["slots"].end() ; ++slot ) { const std::string slotname = slot.key().asString(); slots[slotname] = data["slots"].get(slotname, 0).asInt(); } for( Json::Value::iterator role = data["roles"].begin(); role != data["roles"].end(); ++role ) { const std::string rolename = role.key().asString(); roles[rolename] = data["roles"].get(rolename, 0).asBool(); } for(int it=0;it<4;it++) thrusterUpgrades[it] = 1.0 + (double(it)/10.0); for( Json::Value::iterator slot = data["thrust_upgrades"].begin() ; slot != data["thrust_upgrades"].end() ; ++slot ) { const std::string slotname = slot.key().asString(); const int index = Clamp(atoi(&slotname.c_str()[9]), 1, 3); thrusterUpgrades[index] = data["thrust_upgrades"].get(slotname, 0).asDouble(); } atmosphericPressureLimit = data.get("atmospheric_pressure_limit", 10.0).asDouble(); // 10 atmosphere is about 90 metres underwater (on Earth) { const auto it = slots.find("engine"); if (it != slots.end()) { it->second = Clamp(it->second, 0, 1); } } effectiveExhaustVelocity = data.get("effective_exhaust_velocity", -1.0f).asFloat(); const float thruster_fuel_use = data.get("thruster_fuel_use", -1.0f).asFloat(); if(effectiveExhaustVelocity < 0 && thruster_fuel_use < 0) { // default value of v_c is used effectiveExhaustVelocity = 55000000; } else if(effectiveExhaustVelocity < 0 && thruster_fuel_use >= 0) { // v_c undefined and thruster fuel use defined -- use it! effectiveExhaustVelocity = GetEffectiveExhaustVelocity(fuelTankMass, thruster_fuel_use, linThrust[Thruster::THRUSTER_FORWARD]); } else { if(thruster_fuel_use >= 0) { Output("Warning: Both thruster_fuel_use and effective_exhaust_velocity defined for %s, using effective_exhaust_velocity.\n", modelName.c_str()); } } baseprice = data.get("price", 0.0).asDouble(); minCrew = data.get("min_crew", 1).asInt(); maxCrew = data.get("max_crew", 1).asInt(); hyperdriveClass = data.get("hyperdrive_class", 1).asInt(); }
config_t::config_t(const std::string& config_path) { if(!fs::exists(config_path)) { throw configuration_error_t("the configuration path doesn't exist"); } if(!fs::is_regular(config_path)) { throw configuration_error_t("the configuration path doesn't point to a file"); } path.config = config_path; fs::ifstream stream(path.config); if(!stream) { throw configuration_error_t("unable to open the configuration file"); } Json::Reader reader(Json::Features::strictMode()); Json::Value root; if(!reader.parse(stream, root)) { throw configuration_error_t("the configuration file is corrupted"); } // Validation if(root.get("version", 0).asUInt() != 2) { throw configuration_error_t("the configuration version is invalid"); } path.plugins = root["paths"].get("plugins", defaults::plugins_path).asString(); path.runtime = root["paths"].get("runtime", defaults::runtime_path).asString(); path.spool = root["paths"].get("spool", defaults::spool_path).asString(); validate_path(path.plugins); validate_path(path.runtime); validate_path(path.spool); // IO configuration char hostname[256]; if(gethostname(hostname, 256) == 0) { addrinfo hints, * result; std::memset(&hints, 0, sizeof(addrinfo)); hints.ai_flags = AI_CANONNAME; int rv = getaddrinfo(hostname, NULL, &hints, &result); if(rv != 0) { throw configuration_error_t("unable to determine the hostname - %s", gai_strerror(rv)); } if(result == NULL) { throw configuration_error_t("unable to determine the hostname"); } network.hostname = result->ai_canonname; freeaddrinfo(result); } else { throw system_error_t("unable to determine the hostname"); } Json::Value range(root["port-mapper"]["range"]); network.ports = { range[0].asUInt(), range[1].asUInt() }; network.threads = 1; // Component configuration services = parse(root["services"]); storages = parse(root["storages"]); loggers = parse(root["loggers"]); }
void Scene::loadJSON(const std::string &file) { #if MFluidSolver_LOG_LEVEL <= MFluidSolver_LOG_INFO std::cout << "INFO: Loading scene file: " << file << std::endl; #endif // Read JSON file Json::Reader reader; Json::Value root; std::ifstream sceneStream(file, std::ifstream::binary); bool success = reader.parse(sceneStream, root, false); if (!success) { #if MFluidSolver_LOG_LEVEL <= MFluidSolver_LOG_FATAL std::cerr << "FATAL: Failed to parse scene file " << std::endl; #endif throw InvalidSceneException(); } // Load container info glm::vec3 containerDim; containerDim.x = root["containerDim"].get( "scaleX", MFluidSolver_DEFAULT_SCENE_CONTAINER_SCALEX).asFloat(); containerDim.y = root["containerDim"].get( "scaleY", MFluidSolver_DEFAULT_SCENE_CONTAINER_SCALEY).asFloat(); containerDim.z = root["containerDim"].get( "scaleZ", MFluidSolver_DEFAULT_SCENE_CONTAINER_SCALEZ).asFloat(); // Load source info glm::vec3 particleDim; particleDim.x = root["particleDim"].get( "scaleX", MFluidSolver_DEFAULT_SCENE_SOURCE_SCALEX).asFloat(); particleDim.y = root["particleDim"].get( "scaleY", MFluidSolver_DEFAULT_SCENE_SOURCE_SCALEY).asFloat(); particleDim.z = root["particleDim"].get( "scaleZ", MFluidSolver_DEFAULT_SCENE_SOURCE_SCALEZ).asFloat(); glm::vec3 particleConPos; particleConPos.x = root["particleDim"].get( "posX", MFluidSolver_DEFAULT_SCENE_SOURCE_POSX).asFloat(); particleConPos.y = root["particleDim"].get( "posY", MFluidSolver_DEFAULT_SCENE_SOURCE_POSY).asFloat(); particleConPos.z = root["particleDim"].get( "posZ", MFluidSolver_DEFAULT_SCENE_SOURCE_POSZ).asFloat(); // Get spawn method info std::string spawningMethodString = root.get( "spawnMethod", MFluidSolver_DEFAULT_SPAWNMETHODSTRING).asString(); MUtils::toLowerInplace(&spawningMethodString); if (spawningMethodString == "jittered") { spawnMethod = ParticleSpawnMethod::Jittered; } else if (spawningMethodString == "poissondisk") { spawnMethod = ParticleSpawnMethod::PoissonDisk; } else if (spawningMethodString == "uniform") { spawnMethod = ParticleSpawnMethod::Uniform; } // Load camera info glm::vec3 cameraEye; cameraEye.x = root["camera"].get( "eyeX", MFluidSolver_DEFAULT_SCENE_CAMERA_EYEX).asFloat(); cameraEye.y = root["camera"].get( "eyeY", MFluidSolver_DEFAULT_SCENE_CAMERA_EYEY).asFloat(); cameraEye.z = root["camera"].get( "eyeZ", MFluidSolver_DEFAULT_SCENE_CAMERA_EYEZ).asFloat(); glm::vec3 cameraRef; cameraRef.x = root["camera"].get( "refX", MFluidSolver_DEFAULT_SCENE_CAMERA_REFX).asFloat(); cameraRef.y = root["camera"].get( "refY", MFluidSolver_DEFAULT_SCENE_CAMERA_REFY).asFloat(); cameraRef.z = root["camera"].get( "refZ", MFluidSolver_DEFAULT_SCENE_CAMERA_REFZ).asFloat(); // Set attributes camera.setEyeRef(cameraEye, cameraRef); float particleSeparation = root.get( "particleSeparation", MFluidSolver_DEFAULT_PARTICLE_SEPARATION).asFloat(); solver.setParticleSeparation(particleSeparation); int maxParticles = root.get( "maxParticles", MFluidSolver_DEFAULT_MAX_PARTICLES).asInt(); solver.setMaxParticles(maxParticles); solver.init(containerDim * -0.5f, containerDim * 0.5f); // Create geometry solver.fluidContainer = new Cube(glm::vec3(0)); solver.fluidContainer->name = "Fluid Container"; solver.fluidSource = new Cube(glm::vec3(0)); solver.fluidSource->name = "Fluid Source"; objects.push_back(solver.fluidContainer); objects.push_back(solver.fluidSource); // Change geometry scale solver.fluidContainer->transform.setScale(containerDim); solver.fluidSource->transform.setTransform( particleConPos, glm::vec3(0), particleDim); seedScene(); #if 
MFluidSolver_LOG_LEVEL <= MFluidSolver_LOG_INFO std::cout << "INFO: Particle count: " << solver.numParticles() << " / " << solver.maxParticles() << std::endl; #endif solver.sceneLoaded(); }
bool AbcSmc::parse_config(string conf_filename) { Json::Value par; // will contains the par value after parsing. Json::Reader reader; string json_data = slurp(conf_filename); bool parsingSuccessful = reader.parse( json_data, par ); if ( !parsingSuccessful ) { // report to the user the failure and their locations in the document. std::cerr << "Failed to parse configuration\n" << reader.getFormattedErrorMessages(); return false; } string executable = par.get("executable", "").asString(); if (executable != "") { set_executable( executable ); } string resume_dir = par.get("resume_directory", "").asString(); if (resume_dir != "") { if (_mp->mpi_rank == mpi_root) cerr << "Resuming in directory: " << resume_dir << endl; set_resume_directory( resume_dir ); set_resume( true ); } set_smc_iterations( par["smc_iterations"].asInt() ); // TODO: or have it test for convergence set_num_samples( par["num_samples"].asInt() ); set_predictive_prior_fraction( par["predictive_prior_fraction"].asFloat() ); set_pls_validation_training_fraction( par["pls_training_fraction"].asFloat() ); // fraction of runs to use for training set_database_filename( par["database_filename"].asString() ); // are we going to have particles that use a posterior from an earlier ABC run // to determine some of the parameter values? set_posterior_database_filename( par.get("posterior_database_filename", "").asString() ); if (_posterior_database_filename != "" and _num_smc_sets > 1) { cerr << "Using a posterior database as input is not currently supported with smc_iterations > 1. Aborting." << endl; exit(-203); } // Parse model parameters const Json::Value model_par = par["parameters"]; for ( unsigned int i = 0; i < model_par.size(); ++i ) {// Iterates over the sequence elements. string name = model_par[i]["name"].asString(); string short_name = model_par[i].get("short_name", "").asString(); PriorType ptype = UNIFORM; string ptype_str = model_par[i]["dist_type"].asString(); if (ptype_str == "UNIFORM") { ptype = UNIFORM; } else if (ptype_str == "NORMAL" or ptype_str == "GAUSSIAN") { ptype = NORMAL; } else if (ptype_str == "PSEUDO") { ptype = PSEUDO; } else if (ptype_str == "POSTERIOR") { ptype = POSTERIOR; if (_posterior_database_filename == "") { cerr << "Parameter specfied as type POSTERIOR, without previously specifying a posterior_database_filename. Aborting." << endl; exit(-204); } } else { cerr << "Unknown parameter distribution type: " << ptype_str << ". Aborting." << endl; exit(-205); } NumericType ntype = INT; string ntype_str = model_par[i]["num_type"].asString(); if (ntype_str == "INT") { ntype = INT; } else if (ntype_str == "FLOAT") { ntype = FLOAT; } else { cerr << "Unknown parameter numeric type: " << ntype_str << ". Aborting." << endl; exit(-206); } double par1 = model_par[i]["par1"].asDouble(); double par2 = model_par[i]["par2"].asDouble(); double step = model_par[i].get("step", 1.0).asDouble(); // default increment is 1 add_next_parameter( name, short_name, ptype, ntype, par1, par2, step); } // Parse model metrics const Json::Value model_met = par["metrics"]; for ( unsigned int i = 0; i < model_met.size(); ++i ) {// Iterates over the sequence elements. string name = model_met[i]["name"].asString(); string short_name = model_met[i].get("short_name", "").asString(); NumericType ntype = INT; string ntype_str = model_met[i]["num_type"].asString(); if (ntype_str == "INT") { ntype = INT; } else if (ntype_str == "FLOAT") { ntype = FLOAT; } else { cerr << "Unknown metric numeric type: " << ntype_str << ". Aborting." 
<< endl; exit(-207); } double val = model_met[i]["value"].asDouble(); add_next_metric( name, short_name, ntype, val); } return true; }
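A minimal configuration document that exercises the keys AbcSmc::parse_config() above reads. The file names and numbers are invented, only keys appearing in the parser are used, and parseExampleAbcConfig is a hypothetical helper for illustration.

#include <string>
#include <json/json.h>

static bool parseExampleAbcConfig(Json::Value &par) {
  const std::string json_data = R"({
    "executable": "./simulator",
    "smc_iterations": 5,
    "num_samples": 1000,
    "predictive_prior_fraction": 0.1,
    "pls_training_fraction": 0.5,
    "database_filename": "abc.sqlite",
    "parameters": [
      { "name": "beta", "dist_type": "UNIFORM", "num_type": "FLOAT", "par1": 0.0, "par2": 1.0 }
    ],
    "metrics": [
      { "name": "mean_size", "num_type": "FLOAT", "value": 42.0 }
    ]
  })";
  Json::Reader reader;
  return reader.parse(json_data, par);  // same call parse_config() makes on the slurped file
}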
Json::Value C1WireForWindows::readData(const _t1WireDevice& device,int unit) const { if (m_Socket==INVALID_SOCKET) throw C1WireForWindowsReadException("invalid socket"); // Request Json::Value reqRoot; reqRoot["ReadData"]["Id"]=device.devid; reqRoot["ReadData"]["Unit"]=unit; Json::FastWriter writer; // Send request and wait for answer std::string answer = SendAndReceive(writer.write(reqRoot)); // Answer processing Json::Value ansRoot; Json::Reader reader; if (answer.empty() || !reader.parse(answer,ansRoot)) throw C1WireForWindowsReadException("invalid answer"); if (!ansRoot["InvalidRequestReason"].isNull()) throw C1WireForWindowsReadException(std::string("1-wire readData : get an InvalidRequest answer with reason ").append(ansRoot.get("InvalidRequestReason","unknown").asString())); return ansRoot; }
void UserEntity::Deserialize(Json::Value& root) { name = root.get("name", "").asString(); type = root.get("type", "").asString(); }
void cWSSCompact::LoadEntitiesFromJson(Json::Value & a_Value, cEntityList & a_Entities, cBlockEntityList & a_BlockEntities, cWorld * a_World) { // Load chests: Json::Value AllChests = a_Value.get("Chests", Json::nullValue); if (!AllChests.empty()) { for (Json::Value::iterator itr = AllChests.begin(); itr != AllChests.end(); ++itr ) { std::auto_ptr<cChestEntity> ChestEntity(new cChestEntity(0, 0, 0, a_World)); if (!ChestEntity->LoadFromJson(*itr)) { LOGWARNING("ERROR READING CHEST FROM JSON!" ); } else { a_BlockEntities.push_back(ChestEntity.release()); } } // for itr - AllChests[] } // Load dispensers: Json::Value AllDispensers = a_Value.get("Dispensers", Json::nullValue); for (Json::Value::iterator itr = AllDispensers.begin(); itr != AllDispensers.end(); ++itr) { std::auto_ptr<cDispenserEntity> DispenserEntity(new cDispenserEntity(0, 0, 0, a_World)); if (!DispenserEntity->LoadFromJson(*itr)) { LOGWARNING("ERROR READING DISPENSER FROM JSON!" ); } else { a_BlockEntities.push_back(DispenserEntity.release()); } } // for itr - AllDispensers[] // Load Flowerpots: Json::Value AllFlowerPots = a_Value.get("FlowerPots", Json::nullValue); for (Json::Value::iterator itr = AllFlowerPots.begin(); itr != AllFlowerPots.end(); ++itr) { std::auto_ptr<cFlowerPotEntity> FlowerPotEntity(new cFlowerPotEntity(0, 0, 0, a_World)); if (!FlowerPotEntity->LoadFromJson(*itr)) { LOGWARNING("ERROR READING FLOWERPOT FROM JSON!" ); } else { a_BlockEntities.push_back(FlowerPotEntity.release()); } } // for itr - AllFlowerPots[] // Load furnaces: Json::Value AllFurnaces = a_Value.get("Furnaces", Json::nullValue); for (Json::Value::iterator itr = AllFurnaces.begin(); itr != AllFurnaces.end(); ++itr) { // TODO: The block type and meta aren't correct, there's no way to get them here std::auto_ptr<cFurnaceEntity> FurnaceEntity(new cFurnaceEntity(0, 0, 0, E_BLOCK_FURNACE, 0, a_World)); if (!FurnaceEntity->LoadFromJson(*itr)) { LOGWARNING("ERROR READING FURNACE FROM JSON!" ); } else { a_BlockEntities.push_back(FurnaceEntity.release()); } } // for itr - AllFurnaces[] // Load signs: Json::Value AllSigns = a_Value.get("Signs", Json::nullValue); for (Json::Value::iterator itr = AllSigns.begin(); itr != AllSigns.end(); ++itr) { std::auto_ptr<cSignEntity> SignEntity(new cSignEntity(E_BLOCK_SIGN_POST, 0, 0, 0, a_World)); if (!SignEntity->LoadFromJson(*itr)) { LOGWARNING("ERROR READING SIGN FROM JSON!"); } else { a_BlockEntities.push_back(SignEntity.release()); } } // for itr - AllSigns[] // Load note blocks: Json::Value AllNotes = a_Value.get("Notes", Json::nullValue); for (Json::Value::iterator itr = AllNotes.begin(); itr != AllNotes.end(); ++itr) { std::auto_ptr<cNoteEntity> NoteEntity(new cNoteEntity(0, 0, 0, a_World)); if (!NoteEntity->LoadFromJson(*itr)) { LOGWARNING("ERROR READING NOTE BLOCK FROM JSON!" ); } else { a_BlockEntities.push_back(NoteEntity.release()); } } // for itr - AllNotes[] // Load jukeboxes: Json::Value AllJukeboxes = a_Value.get("Jukeboxes", Json::nullValue); for (Json::Value::iterator itr = AllJukeboxes.begin(); itr != AllJukeboxes.end(); ++itr) { std::auto_ptr<cJukeboxEntity> JukeboxEntity(new cJukeboxEntity(0, 0, 0, a_World)); if (!JukeboxEntity->LoadFromJson(*itr)) { LOGWARNING("ERROR READING JUKEBOX FROM JSON!" 
); } else { a_BlockEntities.push_back(JukeboxEntity.release()); } } // for itr - AllJukeboxes[] // Load command blocks: Json::Value AllCommandBlocks = a_Value.get("CommandBlocks", Json::nullValue); for (Json::Value::iterator itr = AllCommandBlocks.begin(); itr != AllCommandBlocks.end(); ++itr) { std::auto_ptr<cCommandBlockEntity> CommandBlockEntity(new cCommandBlockEntity(0, 0, 0, a_World)); if (!CommandBlockEntity->LoadFromJson(*itr)) { LOGWARNING("ERROR READING COMMAND BLOCK FROM JSON!" ); } else { a_BlockEntities.push_back(CommandBlockEntity.release()); } } // for itr - AllCommandBlocks[] // Load mob heads: Json::Value AllMobHeads = a_Value.get("MobHeads", Json::nullValue); for (Json::Value::iterator itr = AllMobHeads.begin(); itr != AllMobHeads.end(); ++itr) { std::auto_ptr<cMobHeadEntity> MobHeadEntity(new cMobHeadEntity(0, 0, 0, a_World)); if (!MobHeadEntity->LoadFromJson(*itr)) { LOGWARNING("ERROR READING MOB HEAD FROM JSON!" ); } else { a_BlockEntities.push_back(MobHeadEntity.release()); } } // for itr - AllMobHeads[] }
int main(int argc, const char *argv[]) { Json::Value options; char usage[] = "Usage: main [jobNum] config1 [config2 ...]"; if (argc <= 1) { std::cerr << "Too few arguments" << std::endl; std::cerr << usage << std::endl; return 1; } else if ((std::string(argv[1]) == "-h") || (std::string(argv[1]) == "--help")) { std::cout << usage << std::endl; return 0; } unsigned int configStart = 1; int jobNum = -1; // try to interpret the first arg as a job number bool isJobNum = true; for (int i = 0; argv[1][i] != '\0'; i++) { if (!isdigit(argv[1][i])) { isJobNum = false; break; } } if (isJobNum) { jobNum = atoi(argv[1]); configStart++; } for (int i = configStart; i < argc; i++) { if (! readJson(argv[i],options)) { return 1; } } int numTrials = options.get("trials",1).asUInt(); int startTrial = 0; int origNumTrials = numTrials; unsigned int numTrialsPerJob = options.get("trialsPerJob",1).asUInt(); unsigned int maxNumStepsPerEpisode = options.get("maxNumStepsPerEpisode",10000).asUInt(); if (jobNum < 0) { jobNum = 0; } else { startTrial = jobNum * numTrialsPerJob; numTrials = min((int)numTrialsPerJob,numTrials-startTrial); } if (numTrials <= 0) { std::cerr << "ERROR: insufficient number of trials: " << numTrials << std::endl; std::cerr << "Calculated from: jobNum: " << jobNum << " numTrialsPerJob: " << numTrialsPerJob << " numTrials: " << origNumTrials << std::endl; std::cerr << "Start trial should be: " << startTrial << std::endl; return 1; } replaceOptsDir(options); if (jobNum == 0) saveConfig(options); replaceOptsJob(options,boost::lexical_cast<std::string>(jobNum)); unsigned int numEpisodes = options.get("numEpisodesPerTrial",1).asUInt(); bool displayDescriptionQ = options["verbosity"].get("description",true).asBool(); bool displaySummaryQ = options["verbosity"].get("summary",true).asBool(); bool displayObsQ = options["verbosity"].get("observation",true).asBool(); bool displayStepsPerEpisodeQ = options["verbosity"].get("stepsPerEpisode",true).asBool(); bool displayStepsPerTrialQ = options["verbosity"].get("stepsPerTrial",true).asBool(); std::string saveFilename = options["save"].get("results","").asString(); bool saveResultsQ = (saveFilename != ""); bool randomizeSeedQ = options.get("randomizeSeed",false).asBool(); // running for fixed lengths unsigned int numStepsPerEpisode = options.get("numStepsPerEpisode",0).asUInt(); bool runForFixedLength = (numStepsPerEpisode != 0); // get the output DT information unsigned int outputDTSteps = options["verbosity"].get("dtsteps",0).asUInt(); std::string outputDTFilename = options["verbosity"].get("dtfile","").asString(); bool outputDTCSVQ = (outputDTFilename != ""); boost::shared_ptr<OutputDT> outputDT; boost::shared_ptr<std::vector<Action::Type> > actions; Observation obs; double startTime = getTime(); std::vector<std::vector<unsigned int> > numSteps(numTrials,std::vector<unsigned int>(numEpisodes,0)); std::vector<std::vector<unsigned int> > numCaptures(numTrials,std::vector<unsigned int>(numEpisodes,0)); std::vector<std::vector<unsigned int> > *results = &numSteps; if (runForFixedLength) results = &numCaptures; std::cout << "Running for " << numTrials << " trials" << std::endl; unsigned int trialNum; unsigned int randomSeed; for (int trial = 0; trial < numTrials; trial++) { trialNum = trial + startTrial; if (randomizeSeedQ) randomSeed = getTime() * 1000000 + 1000 * getpid() + trialNum; // hopefully random enough else randomSeed = trialNum; //std::cout << "RANDOM SEED: " << randomSeed << std::endl; Json::Value trialOptions(options); 
replaceOptsTrial(trialOptions,trialNum); boost::shared_ptr<World> world = createWorldAgents(randomSeed,trialNum,trialOptions); boost::shared_ptr<const WorldModel> model = world->getModel(); std::cout << "Ad hoc agent ind: " << model->getAdhocInd() << std::endl; // INITIALIZATION if (trial == 0) { if (displayDescriptionQ) std::cout << world->generateDescription() << std::endl; if (outputDTCSVQ) { // set up the actions actions = boost::shared_ptr<std::vector<Action::Type> >(new std::vector<Action::Type>(model->getNumAgents())); // create models for the DT csv output if required std::vector<std::string> modelNames; //modelNames.push_back("GR"); //modelNames.push_back("TA"); //modelNames.push_back("GP"); //modelNames.push_back("PD"); outputDT = boost::shared_ptr<OutputDT>(new OutputDT(outputDTFilename,model->getDims(),model->getNumAgents()-1,modelNames,true,false,outputDTSteps)); } } if (outputDTCSVQ) { if (outputDT->hasCollectedSufficientData()) { std::cout << "WARNING: collected sufficient data, stopping with " << trial << " trials" << std::endl; numSteps.resize(trial); break; } } if (displayStepsPerTrialQ) std::cout << "trial " << std::setw(2) << trialNum << ": " << std::flush; for (unsigned int episode = 0; episode < numEpisodes; episode++) { world->randomizePositions(); world->restartAgents(); if (outputDTCSVQ) { // for the first step, add the observation, since it keeps a history of 1 world->generateObservation(obs); outputDT->saveStep(trial,numSteps[trial][episode],obs,*actions); } while (!model->isPreyCaptured()) { numSteps[trial][episode]++; // check end conditions if (runForFixedLength) { if (numSteps[trial][episode] > numStepsPerEpisode) break; } else { if (numSteps[trial][episode] > maxNumStepsPerEpisode) { std::cerr << "TRIAL " << trial << " EPISODE " << episode << " TOO LONG" << std::endl; break; } } if (displayObsQ) { world->generateObservation(obs); std::cout << obs << std::endl; } world->step(actions); if (outputDTCSVQ){ world->generateObservation(obs); // should follow world->step so that we can extract the observed actions of the previous step outputDT->saveStep(trial,numSteps[trial][episode],obs,*actions); } // if we want to run for a fixed length and the prey is captured, find a new position for the prey if (runForFixedLength && model->isPreyCaptured()) { //std::cout << "Prey is captured, generating new position" << std::endl; world->randomizePreyPosition(); numCaptures[trial][episode]++; } } // while the episode lasts if (displayObsQ) { world->generateObservation(obs); std::cout << obs << std::endl; } if (displayStepsPerEpisodeQ) std::cout << std::setw(3) << (*results)[trial][episode] << " " << std::flush; } if (displayStepsPerTrialQ) displayStepsPerTrial(displayStepsPerEpisodeQ,(*results)[trial]); } // end for trial double endTime = getTime(); // optionally display the summary if (displaySummaryQ) displaySummary(endTime-startTime,*results); // optionally save the results if (saveResultsQ) saveResults(saveFilename,startTrial,*results); // optionally finalize the saving of data for the DT if (outputDTCSVQ) outputDT->finalizeSave(randomSeed); return 0; }
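main() above relies on a readJson helper to load each config file named on the command line into options, but that helper is not part of this excerpt. The sketch below shows one plausible implementation using Json::Reader, as used elsewhere in this collection; the merge-by-top-level-member behaviour, the error messages, and the include path are assumptions.

#include <fstream>
#include <iostream>
#include <json/json.h>  // include path varies between jsoncpp installations

// Hypothetical readJson: parse one config file and merge its top-level
// members into options, later files overriding earlier ones.
bool readJson(const char *filename, Json::Value &options) {
  std::ifstream in(filename);
  if (!in) {
    std::cerr << "ERROR: could not open config file: " << filename << std::endl;
    return false;
  }
  Json::Reader reader;
  Json::Value config;
  if (!reader.parse(in, config) || !config.isObject()) {
    std::cerr << "ERROR: could not parse config file: " << filename << std::endl;
    return false;
  }
  for (Json::Value::iterator it = config.begin(); it != config.end(); ++it) {
    options[it.memberName()] = *it;
  }
  return true;
}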
void testSubClassA::deSerialize(Json::Value &root) { _testSubADouble = root.get("testSubADouble", 0.0).asDouble(); this->testClassA::deSerialize(root); }
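Only the deserialization direction of testSubClassA is shown. For symmetry, a hypothetical serialize counterpart might look like the following, assuming testClassA declares a matching serialize(Json::Value &) that this subclass extends:

void testSubClassA::serialize(Json::Value &root) {
  // Let the base class write its members first, then add this subclass's field.
  this->testClassA::serialize(root);
  root["testSubADouble"] = _testSubADouble;
}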
void replaceOptsDir(Json::Value &options) { std::map<std::string,std::string> reps; reps["$(DIR)"] = options.get("dir","").asString(); jsonReplaceStrings(options,reps); }
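replaceOptsDir delegates the actual substitution to jsonReplaceStrings, which is not shown here; replaceOptsJob and replaceOptsTrial in the runner above presumably reuse it with other placeholders. A hedged sketch of such a helper, recursively walking the Json::Value tree and rewriting every string leaf, could look like this:

#include <map>
#include <string>

// Hypothetical jsonReplaceStrings: replace every occurrence of each
// placeholder key (e.g. "$(DIR)") with its value inside all string values,
// recursing through nested objects and arrays.
void jsonReplaceStrings(Json::Value &value, const std::map<std::string, std::string> &reps) {
  if (value.isString()) {
    std::string s = value.asString();
    for (std::map<std::string, std::string>::const_iterator r = reps.begin(); r != reps.end(); ++r) {
      std::string::size_type pos = 0;
      while ((pos = s.find(r->first, pos)) != std::string::npos) {
        s.replace(pos, r->first.size(), r->second);
        pos += r->second.size();  // skip past the replacement to avoid re-matching it
      }
    }
    value = s;
  } else if (value.isObject() || value.isArray()) {
    for (Json::Value::iterator it = value.begin(); it != value.end(); ++it) {
      jsonReplaceStrings(*it, reps);
    }
  }
}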
void ParticleComponent::parsePrefab(json::Value& val) { if(val.isMember("texture")) { const std::string assetid = val["texture"].asString(); TextureAsset* asset = Asset::getTexture(assetid); if(asset) { //m_particleTexture.reset(new sf::Texture(*asset->getAsset())); m_particleSystem.reset(new thor::ParticleSystem(asset->getAsset())); createEmitter(); } else { szerr << "Prefab particle texture '" << assetid << "' could not be retrieved." << ErrorStream::error; } } if(val.isMember("emissionrate")) { setEmissionRate(static_cast<float>(val["emissionrate"].asDouble())); } if(val.isMember("lifetime")) { if(val["lifetime"].isArray() && val["lifetime"].size() == 2) { setLifetime( static_cast<float>(val["lifetime"][0U].asDouble()), static_cast<float>(val["lifetime"][1U].asDouble()) ); } else if(val["lifetime"].isArray() && val["lifetime"].size() == 1) { setLifetime( static_cast<float>(val["lifetime"][0U].asDouble()) ); } else if(!val["lifetime"].isArray()) { setLifetime(static_cast<float>(val["lifetime"].asDouble())); } else { szerr << "Invalid particle lifetime value in prefab." << ErrorStream::error; } } if(val.isMember("velocity")) { if(val["velocity"].isArray() && val["velocity"].size() == 2) { setVelocity(sf::Vector2f( static_cast<float>(val["velocity"][0U].asDouble()), static_cast<float>(val["velocity"][1U].asDouble()) )); } } if(val.isMember("rotation")) { if(val["rotation"].isArray() && val["rotation"].size() == 2) { setRotation( static_cast<float>(val["rotation"][0U].asDouble()), static_cast<float>(val["rotation"][1U].asDouble()) ); } else if(val["rotation"].isArray() && val["rotation"].size() == 1) { setRotation( static_cast<float>(val["rotation"][0U].asDouble()) ); } else if(!val["rotation"].isArray()) { setRotation(static_cast<float>(val["rotation"].asDouble())); } else { szerr << "Invalid particle rotation value in prefab." << ErrorStream::error; } } if(val.isMember("scale")) { if(val["scale"].isArray() && val["scale"].size() == 2) { setScale( static_cast<float>(val["scale"][0U].asDouble()), static_cast<float>(val["scale"][1U].asDouble()) ); } else if(val["scale"].isArray() && val["scale"].size() == 1) { setScale( static_cast<float>(val["scale"][0U].asDouble()) ); } else if(!val["scale"].isArray()) { setScale(static_cast<float>(val["scale"].asDouble())); } else { szerr << "Invalid particle scale value in prefab." << ErrorStream::error; } } if(val.isMember("rotationspeed")) { if(val["rotationspeed"].isArray() && val["rotationspeed"].size() == 2) { setRotationSpeed( static_cast<float>(val["rotationspeed"][0U].asDouble()), static_cast<float>(val["rotationspeed"][1U].asDouble()) ); } else if(val["rotationspeed"].isArray() && val["rotationspeed"].size() == 1) { setRotationSpeed( static_cast<float>(val["rotationspeed"][0U].asDouble()) ); } else if(!val["rotationspeed"].isArray()) { setRotationSpeed(static_cast<float>(val["rotationspeed"].asDouble())); } else { szerr << "Invalid particle rotationspeed value in prefab." 
<< ErrorStream::error; } } if(val.isMember("affectors")) { json::Value affectors = val["affectors"]; for(json::Value::iterator it = affectors.begin(); it != affectors.end(); ++it) { const std::string affector = it.memberName(); if(affector == "fade") { addFadeAffector( static_cast<float>((*it)[0U].asDouble()), static_cast<float>((*it)[1U].asDouble()) ); } else if(affector == "scale") { addScaleAffector( static_cast<float>((*it)[0U].asDouble()), static_cast<float>((*it)[1U].asDouble()) ); } else if(affector == "force") { addForceAffector( static_cast<float>((*it)[0U].asDouble()), static_cast<float>((*it)[1U].asDouble()) ); } } } if(val.get("prewarm", 0).asBool()) { prewarm(); } if(val.isMember("shader")) { json::Value shader = val["shader"]; if(shader.isConvertibleTo(json::stringValue)) { setShader(shader.asString()); } else if(shader.isObject()) { setShader(shader["asset"].asString()); if(shader.isMember("param") && !shader["param"].empty()) { json::Value parameters = shader["param"]; for(json::Value::iterator it = parameters.begin(); it != parameters.end(); ++it) { const std::string name = it.memberName(); json::Value v = *it; if(v.isArray()) { switch(v.size()) { case 1: m_shaderAsset->getAsset()->setParameter(name, (float)v[0U].asDouble()); break; case 2: m_shaderAsset->getAsset()->setParameter(name, (float)v[0U].asDouble(), (float)v[1U].asDouble()); break; case 3: m_shaderAsset->getAsset()->setParameter(name, (float)v[0U].asDouble(), (float)v[1U].asDouble(), (float)v[2U].asDouble()); break; case 4: { m_shaderAsset->getAsset()->setParameter(name, (float)v[0U].asDouble(), (float)v[1U].asDouble(), (float)v[2U].asDouble(), (float)v[3U].asDouble()); break; } default: szerr << "Incorrect amount of parameter arguments in prefab." << ErrorStream::error; break; } } else if(v.isConvertibleTo(json::realValue)) { m_shaderAsset->getAsset()->setParameter(name, (float)v.asDouble()); } } } } } if(val.isMember("color")) { sf::Uint32 size = val["color"].size(); if(size == 3 || size == 4) { sf::Color color; color.r = static_cast<sf::Uint8>(val["color"][0U].asUInt()); color.g = static_cast<sf::Uint8>(val["color"][1U].asUInt()); color.b = static_cast<sf::Uint8>(val["color"][2U].asUInt()); if(size == 4) { color.a = static_cast<sf::Uint8>(val["color"][3U].asUInt()); } setColor(color); } } const std::string blend = val.get("blendmode", "alpha").asString(); if(blend == "alpha") { m_renderStates.blendMode = sf::BlendAlpha; } else if(blend == "additive") { m_renderStates.blendMode = sf::BlendAdd; } else if(blend == "multiply") { m_renderStates.blendMode = sf::BlendMultiply; } else if(blend == "none") { m_renderStates.blendMode = sf::BlendNone; } }
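The lifetime, rotation, scale, and rotationspeed branches above all decode the same shape: a plain number, or a one- or two-element array. A small helper could centralize that decoding. The sketch below is a suggestion rather than part of the original component, and it assumes the corresponding setters treat an equal min/max pair the same way as their single-value overloads:

// Hypothetical helper: read a "scalar or [min, max]" prefab value into a
// float range. Returns false for unsupported array sizes so the caller can
// report the usual prefab error.
static bool readFloatRange(const json::Value &v, float &minOut, float &maxOut) {
  if (v.isArray() && v.size() == 2) {
    minOut = static_cast<float>(v[0U].asDouble());
    maxOut = static_cast<float>(v[1U].asDouble());
    return true;
  }
  if (v.isArray() && v.size() == 1) {
    minOut = maxOut = static_cast<float>(v[0U].asDouble());
    return true;
  }
  if (!v.isArray()) {
    minOut = maxOut = static_cast<float>(v.asDouble());
    return true;
  }
  return false;
}

A call site would then read, for example: float lo, hi; if (readFloatRange(val["lifetime"], lo, hi)) setLifetime(lo, hi); else szerr << "Invalid particle lifetime value in prefab." << ErrorStream::error;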
virtual Json::Value formFunction(const Json::Value& templateRoot){ markupImage = Mat(templateRoot.get("height", 0).asInt(), templateRoot.get("width", 0).asInt(), CV_8UC1, Scalar::all(255)); return super::formFunction(templateRoot); }
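formFunction reads only two fields, width and height, and allocates an all-white single-channel markup image of that size. Since the surrounding class and its super base are not shown, the standalone example below is illustrative only: it parses a minimal template document from a string and builds the same kind of cv::Mat.

#include <string>
#include <json/json.h>
#include <opencv2/core/core.hpp>

// Illustrative sketch: build the blank markup image that formFunction above
// allocates, from a JSON template such as {"width": 850, "height": 1100}.
cv::Mat buildMarkupImage(const std::string &templateJson) {
  Json::Reader reader;
  Json::Value templateRoot;
  if (!reader.parse(templateJson, templateRoot)) {
    return cv::Mat();  // empty image signals a parse failure
  }
  return cv::Mat(templateRoot.get("height", 0).asInt(),
                 templateRoot.get("width", 0).asInt(),
                 CV_8UC1, cv::Scalar::all(255));
}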