/* Dict iterator callback: appends "key": value to the JSON text being built
 * in the opaque ToJsonIterState.  A ", " separator is emitted before every
 * entry except the first. */
static void to_json_dict_iter(const char *key, QObject *obj, void *opaque)
{
    ToJsonIterState *s = opaque;
    QString *key_str;

    if (s->count != 0) {
        qstring_append(s->str, ", ");
    }

    /* Route the key through to_json() so it is quoted/escaped as a string. */
    key_str = qstring_from_str(key);
    to_json(QOBJECT(key_str), s->str);
    QDECREF(key_str);

    qstring_append(s->str, ": ");
    to_json(obj, s->str);
    s->count++;
}
/* Serialize @obj to a newly allocated QString holding its JSON text.
 * The caller owns the returned string. */
QString *qobject_to_json(const QObject *obj)
{
    QString *out = qstring_new();

    to_json(obj, out);
    return out;
}
/* Serialize @obj to JSON and return it as a newly allocated C string.
 * The caller must release the result with g_free(). */
char *g_variant_to_json(GVariant *obj)
{
    GString *buf = g_string_sized_new(30);

    to_json(obj, buf, 0, 0);
    /* FALSE: hand the character data to the caller instead of freeing it */
    return g_string_free(buf, FALSE);
}
// Supplying "unlearner" without its required "unlearner_parameter" section
// must make every unlearner-capable algorithm fail with runtime_error.
TEST(classifier_factory, invalid_unlearner_parameter) {
  storage_ptr s(new storage::local_storage);
  json js(new json_object);
  js["regularization_weight"] = to_json(1.0);
  js["unlearner"] = to_json(std::string("lru"));
  common::jsonconfig::config conf(js);
  const char* algorithms[] = {"PA1", "PA2", "CW", "AROW", "NHERD"};
  for (std::size_t i = 0; i < sizeof(algorithms) / sizeof(algorithms[0]); ++i) {
    EXPECT_THROW(classifier_factory::create_classifier(algorithms[i], conf, s),
                 common::exception::runtime_error);
  }
}
bool convert(const schema& s, json& j) { json::object o; json::array a; std::transform(s.begin(), s.end(), std::back_inserter(a), [](auto& t) { return to_json(t); }); o["types"] = std::move(a); j = std::move(o); return true; }
// Unknown or empty classifier names must be rejected with unsupported_method,
// even when the rest of the configuration is valid.
TEST(classifier_factory, invalid_algorithm) {
  storage_ptr s(new storage::local_storage);
  {
    // invalid classifier name
    json js(new json_object);
    js["regularization_weight"] = to_json(1.0);
    js["unlearner"] = to_json(std::string("lru"));
    js["unlearner_parameter"] = new json_object;
    js["unlearner_parameter"]["max_size"] = to_json(1);
    common::jsonconfig::config param(js);
    const char* bad_names[] = {"pa", "", "saitama"};
    for (std::size_t i = 0; i < sizeof(bad_names) / sizeof(bad_names[0]); ++i) {
      EXPECT_THROW(
          classifier_factory::create_classifier(bad_names[i], param, s),
          common::unsupported_method);
    }
  }
}
/*
 * Gather the JSON representation of every rank's populations at rank 0 and
 * write them, one JSON document per line, to "<prefix>populations.json".
 *
 * Non-root ranks serialize their populations and MPI_Send the text to rank 0;
 * rank 0 writes its own text first, then receives and writes each other
 * rank's text in rank order.  Returns EXIT_SUCCESS.
 */
int write_pops(hgt_pop **ps, hgt_params *params, int rank, int numprocs) {
    bstring to_json(hgt_pop **ps, hgt_params *params);  /* defined elsewhere */
    int dest = 0;
    int tag = 0;
    char *rc;
    bstring b = to_json(ps, params);
    rc = bstr2cstr(b, '\n');
    if (rank != 0) {
        MPI_Send(rc, blength(b), MPI_CHAR, dest, tag, MPI_COMM_WORLD);
        free(rc);
    } else {
        FILE *fp = create_file(params->prefix, "populations", "json");
        MPI_Status status;
        int i;
        for (i = 0; i < numprocs; i++) {
            if (i != 0) {
                /* Probe first so the receive buffer can be sized exactly. */
                MPI_Probe(i, tag, MPI_COMM_WORLD, &status);
                int count;
                MPI_Get_count(&status, MPI_CHAR, &count);
                /* The sender transmits blength(b) characters without a NUL
                 * terminator, so allocate one extra byte and terminate
                 * explicitly; printing with "%s" would otherwise read past
                 * the end of the buffer. */
                rc = malloc((count + 1) * sizeof(char));
                MPI_Recv(rc, count, MPI_CHAR, i, tag, MPI_COMM_WORLD, &status);
                rc[count] = '\0';
            }
            fprintf(fp, "%s\n", rc);
            free(rc);
        }
        fclose(fp);
    }
    bdestroy(b);
    return EXIT_SUCCESS;
}
// An "unlearner" key whose companion configuration is incomplete must be
// rejected with runtime_error for these algorithms as well.
TEST(classifier_factory, invalid_unlearner_config) {
  storage_ptr s(new storage::local_storage);
  json js(new json_object);
  js["unlearner"] = to_json(std::string("lru"));
  common::jsonconfig::config conf(js);
  const char* algorithms[] = {"perceptron", "PA"};
  for (std::size_t i = 0; i < sizeof(algorithms) / sizeof(algorithms[0]); ++i) {
    EXPECT_THROW(classifier_factory::create_classifier(algorithms[i], conf, s),
                 common::exception::runtime_error);
  }
}
// Render the active filter settings as an HTML fragment appended to hb.
void mmFilterTransactionsDialog::getDescription(mmHTMLBuilder &hb)
{
    hb.addHorizontalLine();
    hb.addHeader(3, _("Filtering Details: "));
    // Extract the parameters from the transaction dialog and add them to the report.
    wxString filterDetails = to_json();
    // Turn the JSON one-entry-per-line layout into HTML line breaks.
    filterDetails.Replace(",\n", "<br>");
    // Strip the surrounding braces: blank out the leading '{' ...
    filterDetails.replace(0, 1, ' ');
    // ... and drop the trailing '}'.
    filterDetails.RemoveLast(1);
    hb.addText(filterDetails);
}
/* List iterator callback: appends one element's JSON to the string in the
 * opaque ToJsonIterState, preceded by ", " for every element but the first. */
static void to_json_list_iter(QObject *obj, void *opaque)
{
    ToJsonIterState *s = opaque;

    if (s->count != 0) {
        qstring_append(s->str, ", ");
    }
    to_json(obj, s->str);
    s->count++;
}
// Smoke test: serialize a populated A (with nested B) and dump the JSON text.
TEST(to_json, test) {
  A a;
  a.i = 3;
  a.j = 4;
  a.b.k = 5;
  a.b.m = 6;
  std::string out = to_json(a);
  printf("%s", out.c_str());
}
// Unknown algorithm names must raise unsupported_method.
TEST(classifier_factory, exception) {
  jsonconfig::config param(to_json(classifier_config()));
  local_storage* st = new local_storage;
  const char* bad_names[] = {"pa", "", "saitama"};
  for (std::size_t i = 0; i < sizeof(bad_names) / sizeof(bad_names[0]); ++i) {
    ASSERT_THROW(classifier_factory::create_classifier(bad_names[i], param, st),
                 unsupported_method);
  }
  delete st;
}
std::string value::to_json() const { std::vector<boost::asio::const_buffer> data; to_json(data); std::string result; for ( std::vector<boost::asio::const_buffer>::const_iterator i = data.begin(); i != data.end(); ++i) { result.append(boost::asio::buffer_cast<const char*>(*i), boost::asio::buffer_size(*i)); } return result; }
// Serialize this configuration to fileName as pretty-printed JSON.
// Returns false if the file could not be opened for writing.
bool ToolConfig::saveConfig(std::string fileName) {
    std::ofstream out(fileName);
    if (!out.is_open())
        return false;
    json config;
    to_json(config, *this);
    // setw(4) selects pretty-printed output; a plain `out << config`
    // would emit the compact single-line form instead.
    out << std::setw(4) << config << std::endl;
    return true;
}
// Package a classified sentence for the visualizer: the tokens as the
// example's input, the label probability distribution as its output.
Json json_classification(const vector<string>& sentence, const Mat<R>& probs) {
    // store sentence memory & tokens:
    auto viz = visualizable::Sentence<R>(sentence);
    // assemble input + distribution into a "classifier_example" object:
    Json::object example;
    example["type"] = "classifier_example";
    example["input"] = viz.to_json();
    example["output"] = utils::json_finite_distribution(probs, SST::label_names);
    return example;
}
/* Dict iterator callback for the GVariant serializer: appends "key": value
 * to s->str.  Emits ", " before every entry but the first, and when pretty
 * printing starts each entry on a new line indented to s->indent levels. */
static void to_json_dict_iter(const char *key, GVariant *obj, void *opaque)
{
    ToJsonIterState *s = opaque;
    GVariant *key_variant;
    int level;

    if (s->count != 0)
        g_string_append(s->str, ", ");
    if (s->pretty) {
        g_string_append_c(s->str, '\n');
        for (level = 0; level < s->indent; level++)
            g_string_append(s->str, " ");
    }

    /* Route the key through to_json() so it is quoted/escaped as a string. */
    key_variant = g_variant_new_string(key);
    to_json(key_variant, s->str, s->pretty, s->indent);
    g_variant_unref(key_variant);

    g_string_append(s->str, ": ");
    to_json(obj, s->str, s->pretty, s->indent);
    s->count++;
}
// Build a jsonconfig::config describing the "compressive" clustering method
// with fixed parameters suitable for tests.
//
// Fix: the original constructed `json js(new json_object)` and then
// immediately re-assigned `js = new json_object;`, discarding the object it
// had just created; the redundant assignment is removed.
common::jsonconfig::config make_compressive_config() {
  json js(new json_object);
  js["bucket_size"] = to_json(200);
  js["bucket_length"] = to_json(2);
  js["compressed_bucket_size"] = to_json(20);
  js["bicriteria_base_size"] = to_json(2);
  js["forgetting_factor"] = to_json(0.0);
  js["forgetting_threshold"] = to_json(0.5);
  js["seed"] = to_json(0);
  common::jsonconfig::config conf(js);
  return conf;
}
// Convert a type to JSON: its name, kind, structure (via the jsonizer
// visitor), and serialized attribute list.  Returns false if visiting fails.
bool convert(type const& t, json& j) {
  json::object o;
  o["name"] = t.name();
  o["kind"] = to_string(which(t));
  if (!visit(jsonizer{o["structure"]}, t))
    return false;
  json::array attrs;
  for (auto& attr : t.attributes())
    attrs.push_back(to_json(attr));
  o["attributes"] = std::move(attrs);
  j = std::move(o);
  return true;
}
/* List iterator callback for the GVariant serializer: appends one element's
 * JSON to s->str, separated by ", " and newline-indented in pretty mode. */
static void to_json_list_iter(GVariant *obj, void *opaque)
{
    ToJsonIterState *s = opaque;
    int level;

    if (s->count != 0)
        g_string_append(s->str, ", ");
    if (s->pretty) {
        g_string_append_c(s->str, '\n');
        for (level = 0; level < s->indent; level++)
            g_string_append(s->str, " ");
    }
    to_json(obj, s->str, s->pretty, s->indent);
    s->count++;
}
// Populate `classifiers` with one instance of every supported linear
// algorithm, each backed by its own local_storage and a default
// classifier_config.
void InitClassifiers(vector<classifier_base*>& classifiers) {
  jsonconfig::config param(to_json(classifier_config()));
  const char* names[] = {
      "perceptron", "PA", "PA1", "PA2", "CW", "AROW", "NHERD"
  };
  for (std::size_t i = 0; i < sizeof(names) / sizeof(names[0]); ++i) {
    classifiers.push_back(
        classifier_factory::create_classifier(names[i], param,
                                              new local_storage));
  }
}
// Persist the new model document, first deleting any older models of the
// same object/method whose parameters match (or that predate the "subtype"
// field).  Returns ecto::OK.
int process(const ecto::tendrils& inputs, const ecto::tendrils& outputs)
{
  //TODO move this logic to a function call.
  Document doc_new = *db_document_;
  doc_new.update_db(db_);
  PopulateDoc(*object_id_, *session_ids_, *model_method_, *model_submethod_,
              *model_parameters_, doc_new);
  // Read the input model parameters
  or_json::mValue in_parameters = to_json(*model_submethod_);
  // Find all the models of that type for that object
  View view(View::VIEW_MODEL_WHERE_OBJECT_ID_AND_MODEL_TYPE);
  view.Initialize(*object_id_, *model_method_);
  ViewIterator view_iterator(view, db_);
  ViewIterator iter = view_iterator.begin(), end = view_iterator.end();
  for (; iter != end; ++iter)
  {
    // Compare the parameters
    bool is_incomplete_model_type = false;
    or_json::mValue db_parameters;
    // Yes, this is ugly but it's to make sure that we convert the old databases to the new style
    try
    {
      db_parameters = (*iter).get_value("subtype");
    }
    catch (...)
    {
      // Older documents have no "subtype" field; treat them as stale models.
      is_incomplete_model_type = true;
    }
    // If they are the same, delete the current model in the database
    if ((CompareJsonIntersection(in_parameters, db_parameters)) || is_incomplete_model_type)
    {
      std::cout << "Deleting the previous model " << (*iter).id() << " of object " << *object_id_
                << std::endl;
      db_.Delete((*iter).id());
    }
  }
  doc_new.Persist();
  return ecto::OK;
}
/*
 * Run a watchman "query" command against fs_path.
 *
 * Builds the JSON command array ["query", fs_path, {options...}] from expr
 * and the optional query settings, sends it over conn via
 * watchman_query_json, and returns that call's result (error reporting is
 * delegated to it through *error).
 */
struct watchman_query_result *
watchman_do_query(struct watchman_connection *conn,
                  const char *fs_path,
                  const struct watchman_query *query,
                  const struct watchman_expression *expr,
                  struct watchman_error *error)
{
    /* construct the json */
    json_t *json = json_array();
    json_array_append_new(json, json_string("query"));
    json_array_append_new(json, json_string(fs_path));
    json_t *obj = json_object();
    json_object_set_new(obj, "expression", to_json(expr));
    if (query) {
        if (query->fields) {
            json_object_set_new(obj, "fields", fields_to_json(query->fields));
        }
        if (query->empty_on_fresh) {
            json_object_set_new(obj, "empty_on_fresh_instance", json_true());
        }
        if (query->s.time) {
            /* "since" may be a clock-id string or an integer timestamp */
            if (query->since_is_str) {
                json_object_set_new(obj, "since", json_string(query->s.str));
            } else {
                json_t *since = json_integer(query->s.time);
                json_object_set_new(obj, "since", since);
            }
        }
        if (query->nr_suffixes) {
            /* Note that even if you have only one suffix,
             * watchman requires this to be an array. */
            int i;
            json_t *suffixes = json_array();
            for (i = 0; i < query->nr_suffixes; ++i) {
                json_array_append_new(suffixes,
                                      json_string(query->suffixes[i]));
            }
            json_object_set_new(obj, "suffix", suffixes);
        }
        if (query->nr_paths) {
            int i;
            json_t *paths = json_array();
            for (i = 0; i < query->nr_paths; ++i) {
                json_array_append_new(paths, json_path(&query->paths[i]));
            }
            json_object_set_new(obj, "path", paths);
        }
        if (query->all) {
            json_object_set_new(obj, "all", json_string("all"));
        }
        if (query->sync_timeout >= 0) {
            json_object_set_new(obj, "sync_timeout",
                                json_integer(query->sync_timeout));
        }
    }
    json_array_append_new(json, obj);

    /* do the query */
    struct watchman_query_result *r = watchman_query_json(conn, json, error);
    json_decref(json);
    return r;
}
/*
 * Recursively convert a watchman expression tree to its JSON wire form:
 * an array whose first element is the expression-type name from ty_str[],
 * followed by the type-specific arguments.  Returns a new reference the
 * caller owns.
 */
static json_t *
to_json(const struct watchman_expression *expr)
{
    json_t *result = json_array();
    json_t *arg;
    json_array_append_new(result, json_string(ty_str[expr->ty]));

    int i;
    switch (expr->ty) {
        case WATCHMAN_EXPR_TY_ALLOF:
            /*-fallthrough*/
        case WATCHMAN_EXPR_TY_ANYOF:
            /* composite expressions: append each clause recursively */
            for (i = 0; i < expr->e.union_expr.nr; ++i) {
                json_array_append_new(result,
                                      to_json(expr->e.union_expr.clauses[i]));
            }
            break;
        case WATCHMAN_EXPR_TY_NOT:
            json_array_append_new(result, to_json(expr->e.not_expr.clause));
            break;
        case WATCHMAN_EXPR_TY_TRUE:
            /*-fallthrough*/
        case WATCHMAN_EXPR_TY_FALSE:
            /*-fallthrough*/
        case WATCHMAN_EXPR_TY_EMPTY:
            /*-fallthrough*/
        case WATCHMAN_EXPR_TY_EXISTS:
            /* Nothing to do */
            break;
        case WATCHMAN_EXPR_TY_SINCE:
            since_to_json(result, expr);
            break;
        case WATCHMAN_EXPR_TY_SUFFIX:
            json_array_append_new(result,
                                  json_string(expr->e.suffix_expr.suffix));
            break;
        case WATCHMAN_EXPR_TY_MATCH:
            /*-fallthrough*/
        case WATCHMAN_EXPR_TY_IMATCH:
            /*-fallthrough*/
        case WATCHMAN_EXPR_TY_PCRE:
            /*-fallthrough*/
        case WATCHMAN_EXPR_TY_IPCRE:
            /* pattern expressions: pattern text plus optional basename flag */
            json_array_append_new(result,
                                  json_string(expr->e.match_expr.match));
            if (expr->e.match_expr.basename) {
                char *base = basename_str[expr->e.match_expr.basename];
                json_array_append_new(result, json_string(base));
            }
            break;
        case WATCHMAN_EXPR_TY_NAME:
            /*-fallthrough*/
        case WATCHMAN_EXPR_TY_INAME:
            /* one name is sent as a string, several as an array */
            arg = json_string_or_array(expr->e.name_expr.nr,
                                       expr->e.name_expr.names);
            json_array_append_new(result, arg);
            if (expr->e.name_expr.basename) {
                char *base = basename_str[expr->e.name_expr.basename];
                json_array_append_new(result, json_string(base));
            }
            break;
        case WATCHMAN_EXPR_TY_TYPE:
            json_array_append_new(result,
                                  json_string_from_char(expr->e.type_expr.type));
    }
    return result;
}
// Checks that regression_factory::create_regression maps each supported
// algorithm name to the expected concrete regression class.
TEST(regression_factory, trivial) {
  regression::regression_factory f;
  shared_ptr<storage::local_storage> s(new storage::local_storage);
  {
    common::jsonconfig::config param(to_json(
        regression::passive_aggressive::config()));
    shared_ptr<regression::regression_base> r =
        f.create_regression("PA", param, s);
    EXPECT_EQ(typeid(*r), typeid(regression::passive_aggressive&));
  }
  {
    common::jsonconfig::config param(to_json(
        regression::passive_aggressive_1::config()));
    shared_ptr<regression::regression_base> r =
        f.create_regression("PA1", param, s);
    EXPECT_EQ(typeid(*r), typeid(regression::passive_aggressive_1&));
  }
  {
    common::jsonconfig::config param(to_json(
        regression::passive_aggressive_2::config()));
    shared_ptr<regression::regression_base> r =
        f.create_regression("PA2", param, s);
    EXPECT_EQ(typeid(*r), typeid(regression::passive_aggressive_2&));
  }
  {
    common::jsonconfig::config param(to_json(
        regression::perceptron::config()));
    shared_ptr<regression::regression_base> r =
        f.create_regression("perceptron", param, s);
    EXPECT_EQ(typeid(*r), typeid(regression::perceptron&));
  }
  {
    common::jsonconfig::config param(to_json(
        regression::confidence_weighted::config()));
    shared_ptr<regression::regression_base> r =
        f.create_regression("CW", param, s);
    EXPECT_EQ(typeid(*r), typeid(regression::confidence_weighted&));
  }
  {
    common::jsonconfig::config param(to_json(
        regression::arow::config()));
    shared_ptr<regression::regression_base> r =
        f.create_regression("AROW", param, s);
    EXPECT_EQ(typeid(*r), typeid(regression::arow&));
  }
  {
    common::jsonconfig::config param(to_json(
        regression::normal_herd::config()));
    shared_ptr<regression::regression_base> r =
        f.create_regression("NHERD", param, s);
    EXPECT_EQ(typeid(*r), typeid(regression::normal_herd&));
  }
  // NOTE(review): this block duplicates the NHERD check above verbatim;
  // it may have been meant to cover a different algorithm — confirm.
  {
    common::jsonconfig::config param(to_json(
        regression::normal_herd::config()));
    shared_ptr<regression::regression_base> r =
        f.create_regression("NHERD", param, s);
    EXPECT_EQ(typeid(*r), typeid(regression::normal_herd&));
  }
  {
    // Nearest-neighbor regression takes a hand-built JSON config.
    json js(new json_object);
    js["method"] = to_json(std::string("lsh"));
    js["parameter"] = json(new json_object);
    js["parameter"]["hash_num"] = to_json(8);
    js["nearest_neighbor_num"] = to_json(5);
    common::jsonconfig::config param(js);
    shared_ptr<regression::regression_base> r =
        f.create_regression("NN", param, s);
    EXPECT_EQ(typeid(*r), typeid(regression::nearest_neighbor_regression&));
  }
  {
    common::jsonconfig::config param(to_json(
        regression::inverted_index_regression::config()));
    shared_ptr<regression::regression_base> r =
        f.create_regression("euclidean", param, s);
    EXPECT_EQ(typeid(*r), typeid(regression::euclidean_distance_regression&));
  }
  {
    common::jsonconfig::config param(to_json(
        regression::inverted_index_regression::config()));
    shared_ptr<regression::regression_base> r =
        f.create_regression("cosine", param, s);
    EXPECT_EQ(typeid(*r), typeid(regression::cosine_similarity_regression&));
  }
}
// Watches both the per-user and the system core-configuration files and
// reloads core_configuration_ whenever the effective file changes.
// The user file takes precedence; the system file is only used when the
// user file does not exist.
configuration_monitor(const std::string& user_core_configuration_file_path,
                      const std::string& system_core_configuration_file_path = constants::get_system_core_configuration_file_path()) : dispatcher_client() {
  std::vector<std::string> targets = {
      user_core_configuration_file_path,
      system_core_configuration_file_path,
  };

  file_monitor_ = std::make_unique<pqrs::osx::file_monitor>(weak_dispatcher_, targets);

  file_monitor_->file_changed.connect([this, user_core_configuration_file_path, system_core_configuration_file_path](auto&& changed_file_path, auto&& changed_file_body) {
    auto file_path = changed_file_path;

    if (pqrs::filesystem::exists(user_core_configuration_file_path)) {
      // Note:
      // user_core_configuration_file_path == system_core_configuration_file_path
      // if console_user_server is not running.
      if (changed_file_path != user_core_configuration_file_path &&
          changed_file_path == system_core_configuration_file_path) {
        // system_core_configuration_file_path is updated.
        // We ignore it because we are using user_core_configuration_file_path.
        return;
      }
    } else {
      if (changed_file_path == user_core_configuration_file_path) {
        // user_core_configuration_file_path is removed.
        // Fall back to the system file if it exists.
        if (pqrs::filesystem::exists(system_core_configuration_file_path)) {
          file_path = system_core_configuration_file_path;
        }
      }
    }

    if (pqrs::filesystem::exists(file_path)) {
      logger::get_logger()->info("Load {0}...", file_path);
    }

    auto c = std::make_shared<core_configuration::core_configuration>(file_path);

    // Keep the last good configuration if the new file failed to load.
    if (core_configuration_ && !c->is_loaded()) {
      return;
    }

    // Skip the update entirely when the content is unchanged.
    if (core_configuration_ && core_configuration_->to_json() == c->to_json()) {
      return;
    }

    {
      std::lock_guard<std::mutex> lock(core_configuration_mutex_);
      core_configuration_ = c;
    }

    logger::get_logger()->info("core_configuration is updated.");

    enqueue_to_dispatcher([this, c] {
      core_configuration_updated(c);
    });
  });
}
/*
 * Append the JSON rendering of a BSON document to buf.
 *
 * Walks the BSON data with a bson_iterator, sprintf-appending each
 * key/value pair and recursing for embedded documents and array elements.
 *
 * NOTE(review): buf is assumed large enough for the whole output — nothing
 * checks the bound — and the repeated strlen(buf) scans make this O(n^2) in
 * the output length.  String values are not JSON-escaped, the OID output is
 * not quoted, and depth is only threaded through the recursion, never read.
 */
void to_json(char *buf, const char * data, int depth){
    int array_count = 0;
    int object_count = 0;
    bson_iterator i;
    const char * key;
    char oidhex[25];
    bson_iterator_init( &i , data );
    sprintf(buf+strlen(buf),"{");
    while ( bson_iterator_next( &i ) ){
        bson_type t = bson_iterator_type( &i );
        if ( t == 0 )
            break;
        key = bson_iterator_key( &i );
        /* comma-separate all members after the first */
        if(object_count > 0){sprintf(buf+strlen(buf),",");}
        else{object_count=1;}
        sprintf(buf+strlen(buf), "\"%s\":" , key );
        switch ( t ){
        case bson_int:
            sprintf(buf+strlen(buf), "%d" , bson_iterator_int( &i ) );
            break;
        case bson_double:
            sprintf(buf+strlen(buf), "%f" , bson_iterator_double( &i ) );
            break;
        case bson_bool:
            sprintf(buf+strlen(buf), "%s" , bson_iterator_bool( &i ) ? "true" : "false" );
            break;
        case bson_string:
            sprintf(buf+strlen(buf), "\"%s\"" , bson_iterator_string( &i ) );
            break;
        case bson_null:
            sprintf( buf+strlen(buf),"null" );
            break;
        case bson_oid:
            bson_oid_to_string(bson_iterator_oid(&i), oidhex);
            sprintf(buf+strlen(buf), "%s" , oidhex );
            break;
        case bson_object:
            /* embedded document: recurse */
            to_json(buf, bson_iterator_value( &i ) , depth + 1 );
            break;
        case bson_array:
            sprintf(buf+strlen(buf), "[" );
            bson_iterator j;
            bson_iterator_init( &j , bson_iterator_value(&i) );
            array_count =0;
            while( bson_iterator_next(&j)){
                /* comma-separate all elements after the first */
                if(array_count > 0){sprintf(buf+strlen(buf),",");}
                else{array_count=1;}
                to_json(buf, bson_iterator_value( &j ) , depth + 1 );
            }
            sprintf(buf+strlen(buf), "]" );
            break;
        default:
            fprintf( stderr , "can't print type : %d\n" , t );
        }
    }
    sprintf(buf+strlen(buf),"}");
}
// Completion handler for the TCP connect (also re-used, via boost::bind
// below, as the async_send completion handler).  Drains m_pending by
// issuing one HTTP request per queued message.
//
// NOTE(review): `request` is a loop-local std::string handed to async_send
// by raw pointer/length; it is destroyed at the end of each iteration while
// the send may still be in flight — the buffer must outlive the async
// operation.  The loop also queues an async_send and an async_read_until
// for every pending element in the same pass, so several operations are
// outstanding on m_tcp_socket at once.  Both points need confirming/fixing.
void Client::connect_handler (const boost::system::error_code &ec)
{
    DEBUGFUNC;
    if (!ec) {
        if (!m_pending.empty()) {
            while (true != m_pending.empty()) {
                const std::string send_uri = make_send_uri();
                const std::string request = make_http_request(send_uri, m_base64, m_host, m_port, to_json(m_pending.front()));
                m_pending.pop_front();
                // std::cout << request << std::endl;
                m_tcp_socket.async_send(boost::asio::buffer(request.data(), request.length()),
                                        boost::bind(&Client::connect_handler, this,
                                                    boost::asio::placeholders::error));
                boost::asio::async_read_until(m_tcp_socket, m_response, "\r\n",
                                              boost::bind(&Client::response_read_handler, this,
                                                          boost::asio::placeholders::error,
                                                          boost::asio::placeholders::bytes_transferred));
            }
        } else {
            // std::cout << "leave connect handler\n";
        }
    }
}
// Return this value serialized as a JSON string, delegating to the
// append-to-string overload of to_json.
std::string JsonValue::to_json() const {
    std::string out;
    to_json(out);
    return out;
}
// Wrap this action as a JSON object: {"type": ..., "data": ...}, where
// "data" is the subclass-provided payload from to_json().
json::value action::json()const {
    json::value wrapped;
    wrapped["type"] = type();
    wrapped["data"] = to_json();
    return wrapped;
}
// Render a small demo scene (two spheres, one emissive), then serialize the
// recorded animation stages and the scene itself into a JavaScript file for
// the mathbox-based viewer.
int main()
{
    GOOGLE_PROTOBUF_VERIFY_VERSION;
    shiny::RenderRequest renreq;
    Render render;
    nextStage();
    // Camera at (0,1,0) looking down -z.
    render.eye = Vector3(0, 1, 0);
    render.target = Vector3(0, 1, -1);
    render.scene.addSphere(-3, 1, -8, 3);
    render.scene.addSphere(3, 1.5, -7, 2);
    // Make the second sphere a yellow-ish light source.
    render.scene.spheres[1].material.emission = Vector3(0.5, 0.5, 0);
    render.start();
    cout << "Serializing..." << endl;
    auto script = stages;
    /* Expected stage-script shape:
    [
      [
        ["add", "vector", {
          "n": 1, // Number of vectors
          "data": [[1, 1, 1], [1, 2, 3]], // Array of alternating start and end points,
          "line": true, // Whether to draw vector lines
          "arrow": true, // Whether to draw arrowheads
          "size": 0.07, // Size of the arrowhead relative to the stage
        }],
      ]
    ]
    */
    ujson::value jsonScript{ script };
    ujson::value jsonScene = to_json(render.scene);
    // Emit both as global assignments so the viewer page can pick them up.
    ofstream dataFile;
    dataFile.open (OUTPUT_FILE);
    dataFile << "window.mathboxScript = " << jsonScript << ";" << endl;
    dataFile << "window.mathboxScene = " << jsonScene << ";" << endl;
    dataFile.close();
    /* Disabled server loop:
    Server server;
    server.start();
    while (!server.exit) {
        shSleep(1);
    }
    server.stop();
    //*/
    google::protobuf::ShutdownProtobufLibrary();
    cout << "Done!" << endl;
    /* Disabled voxel-traversal experiment:
    thrust::host_vector<Vector3f> h_rpos(n);
    thrust::host_vector<Vector3f> h_rdir(n);
    thrust::host_vector<Vector3i> h_pos(n);
    thrust::host_vector<Vector3i> h_step(n);
    thrust::host_vector<Vector3f> h_tmax(n);
    thrust::host_vector<Vector3f> h_tdelta(n);
    thrust::host_vector<CubeSide> h_side(n);
    for (int i = 0; i < n; i++) {
        Vector3f *p;
        p = &h_rpos[i];
        p->set(0.5f, 0.5f, 0.5f);
        p = &h_rdir[i];
        p->set(0.01f, 1.f, -0.01f);
    }
    initTrace(n, h_rpos, h_rdir, h_pos, h_step, h_tmax, h_tdelta, h_side);
    printElement(0, h_rpos, h_rdir, h_pos, h_step, h_tmax, h_tdelta, h_side);
    step(n, h_pos, h_step, h_tmax, h_tdelta, h_side);
    printElement(0, h_rpos, h_rdir, h_pos, h_step, h_tmax, h_tdelta, h_side);
    */
    return 0;
}