// Builds a map from feed ID (in string form) to the list of folder names
// ("tags") that feed belongs to, from NewsBlur's "folders" JSON array.
// NOTE(review): as visible in this chunk the function body ends after the
// outer loop without a `return result;` or closing brace — presumably the
// remainder lies outside this view.
std::map<std::string, std::vector<std::string>> newsblur_api::mk_feeds_to_tags(
	json_object * folders)
{
	std::map<std::string, std::vector<std::string>> result;
	array_list * tags = json_object_get_array(folders);
	int tags_len = array_list_length(tags);
	for (int i = 0; i < tags_len; ++i) {
		json_object * tag_to_feed_ids = json_object_array_get_idx(folders, i);
		if (!json_object_is_type(tag_to_feed_ids, json_type_object))
			// "folders" array contains not only dictionaries describing
			// folders but also numbers, which are IDs of feeds that don't
			// belong to any folder. This check skips these IDs.
			continue;
		// Each object entry maps one folder name to an array of feed IDs.
		json_object_object_foreach(tag_to_feed_ids, key, feeds_with_tag_obj) {
			std::string std_key(key);
			array_list * feeds_with_tag_arr = json_object_get_array(feeds_with_tag_obj);
			int feeds_with_tag_len = array_list_length(feeds_with_tag_arr);
			for (int j = 0; j < feeds_with_tag_len; ++j) {
				json_object * feed_id_obj = json_object_array_get_idx(feeds_with_tag_obj, j);
				// Feed IDs are keyed by their string representation.
				std::string feed_id(json_object_get_string(feed_id_obj));
				result[feed_id].push_back(std_key);
			}
		}
	}
static void tg_shared_preferences_travel_register_list() { INT32 i = 0; INT32 len = 0; CHAR str[200]; Shared_Preferences_Register_Item* item =NULL; struct array_list* list = s_shared_preferences_register_list.list; //sem_wait (&s_shared_preferences_sem); tg_os_WaitSemaphore(s_shared_preferences_sem); len = array_list_length(list); //output_2_console_1( "travel register list"); for (;i<len;i++) { item = (Shared_Preferences_Register_Item*)array_list_get_idx(list, i); if(item) { sprintf(str,"Path:%s; Key:%s; Type:%d;",item->path,item->keys,item->type); // output_2_console_1( str); } } //sem_post (&s_shared_preferences_sem); tg_os_SignalSemaphore(s_shared_preferences_sem); }
static int ac_update_configuration_datachannelinterfaces(void* data, void* param) { int i; int mtu; int length; const char* bridge; struct ac_if_datachannel* iface = (struct ac_if_datachannel*)data; struct array_list* interfaces = (struct array_list*)param; /* Search interface */ length = array_list_length(interfaces); for (i = 0; i < length; i++) { struct json_object* jsonvalue = array_list_get_idx(interfaces, i); if (jsonvalue && (json_object_get_type(jsonvalue) == json_type_object)) { struct json_object* jsonindex = compat_json_object_object_get(jsonvalue, "Index"); if (jsonindex && (json_object_get_type(jsonindex) == json_type_int)) { if (iface->index == (unsigned long)json_object_get_int(jsonindex)) { if (!ac_update_configuration_getdatachannel_params(jsonvalue, &mtu, &bridge)) { /* TODO update interface */ } /* Interface found */ array_list_put_idx(interfaces, i, NULL); break; } } } } return ((i == length) ? HASH_DELETE_AND_CONTINUE : HASH_CONTINUE); }
std::vector<tagged_feedurl> ttrss_api::get_subscribed_urls() { std::vector<tagged_feedurl> feeds; struct json_object * content = run_op("getCategories", std::map<std::string, std::string>()); if (!content) return feeds; if (json_object_get_type(content) != json_type_array) return feeds; struct array_list * categories = json_object_get_array(content); int catsize = array_list_length(categories); // first fetch feeds within no category fetch_feeds_per_category(NULL, feeds); // then fetch the feeds of all categories for (int i=0;i<catsize;i++) { struct json_object * cat = (struct json_object *)array_list_get_idx(categories, i); fetch_feeds_per_category(cat, feeds); } json_object_put(content); return feeds; }
/*
 * Looks up the registration entry matching all four of (path, normalize_key,
 * cb, type) in the global register list.
 *
 * On return, *idx holds the index of the match, or the list length when no
 * match was found.  Returns the matching item, or NULL when not found.
 *
 * NOTE(review): `normalize_key` is passed to strcmp() and `idx` is written
 * without NULL checks — callers are assumed to pass valid pointers; confirm.
 */
static Shared_Preferences_Register_Item* tg_shared_preferences_find_register_item(const CHAR* path,const CHAR* normalize_key,SharedPreferences_Notification_Callback cb,SharedPreferences_WRITE_TYPE type,INT32* idx)
{
	INT32 i = 0;
	INT32 len = 0;
	Shared_Preferences_Register_Item* item =NULL;
	struct array_list* list = s_shared_preferences_register_list.list;

	return_val_if_fail(path,NULL);
	return_val_if_fail(list,NULL);

	/* Serialize against concurrent list mutation. */
	//sem_wait (&s_shared_preferences_sem);
	tg_os_WaitSemaphore(s_shared_preferences_sem);
	len = array_list_length(list);
	for (;i<len;i++) {
		item = (Shared_Preferences_Register_Item*)array_list_get_idx(list, i);
		/* Skip empty or malformed slots. */
		if (item==NULL || item->path==NULL ||item->keys==NULL)
			continue;
		/* All four attributes must match for this to count as the entry. */
		else if ((item->cb==cb) && (item->type==type) && (strcmp(item->path,path)==0 )&& (strcmp(item->keys,normalize_key)==0 )) {
			break;
		}
	}
	//sem_post (&s_shared_preferences_sem);
	tg_os_SignalSemaphore(s_shared_preferences_sem);
	/* Report the slot index; i == len means "not found". */
	*idx= i;
	return i<len?item:NULL;
}
void CANGenPlugin::parseMappingTable(const std::string& table) { scoped_lock<interprocess_recursive_mutex> lock(mutex); std::string json(table); std::replace(json.begin(), json.end(), '\'', '"');// replace all ' to " std::unique_ptr<json_object, decltype(&json_object_put)> rootobject(json_tokener_parse(json.c_str()), &json_object_put); if(!rootobject) { LOG_ERROR("Failed to parse json: " << json); return; } // Success, use json_obj here. mappingTable.clear(); json_object *sources = json_object_object_get(rootobject.get(),"sources"); if(!sources) return; array_list* arraySources = json_object_get_array(sources); if(!arraySources) return; for(int i=0; i < array_list_length(arraySources); ++i) { json_object *rootsource = static_cast<json_object*>(array_list_get_idx(arraySources,i)); if(!rootsource) continue; json_object* source = json_object_object_get(rootsource, "source"); if(!source) continue; json_object* guid = json_object_object_get(source, "guid"); const std::string guidstr(guid ? json_object_get_string(guid) : ""); json_object* signals = json_object_object_get(rootsource, "signals"); if(!signals) continue; array_list* arraySignals = json_object_get_array(signals); for(int j = 0; j < array_list_length(arraySignals); ++j) { json_object *signal = static_cast<json_object*>(array_list_get_idx(arraySignals,j)); if(!signal) continue; mappingTable.addProperty(guidstr, signal); }// signals array loop }// sources array loop }
// Builds a map from feed ID (in string form) to the list of folder names
// ("tags") that feed belongs to, from NewsBlur's "folders" JSON array.
// NOTE(review): this chunk ends after the outer loop without a
// `return result;` or closing brace — presumably the remainder lies
// outside this view; the truncation is preserved.
std::map<std::string, std::vector<std::string>> newsblur_api::mk_feeds_to_tags(
	json_object * folders)
{
	std::map<std::string, std::vector<std::string>> result;
	array_list * tags = json_object_get_array(folders);
	int tags_len = array_list_length(tags);
	for (int i = 0; i < tags_len; ++i) {
		json_object * tag_to_feed_ids = json_object_array_get_idx(folders, i);
		// FIX: the "folders" array contains not only dictionaries
		// describing folders but also plain numbers (IDs of feeds that
		// belong to no folder).  Without this guard — present in the other
		// copy of this function — json_object_object_foreach would be
		// applied to a non-object entry.
		if (!json_object_is_type(tag_to_feed_ids, json_type_object))
			continue;
		json_object_object_foreach(tag_to_feed_ids, key, feeds_with_tag_obj) {
			std::string std_key(key);
			array_list * feeds_with_tag_arr = json_object_get_array(feeds_with_tag_obj);
			int feeds_with_tag_len = array_list_length(feeds_with_tag_arr);
			for (int j = 0; j < feeds_with_tag_len; ++j) {
				json_object * feed_id_obj = json_object_array_get_idx(feeds_with_tag_obj, j);
				std::string feed_id(json_object_get_string(feed_id_obj));
				result[feed_id].push_back(std_key);
			}
		}
	}
// Fetches all feeds belonging to one ttrss category and appends them to
// `feeds` as tagged_feedurl entries ("<feed_url>#<feed_id>").  Each feed is
// tagged with "~<title>" plus the category name.  Passing cat == NULL is a
// no-op (see comment below); special categories (negative IDs) are skipped.
void ttrss_api::fetch_feeds_per_category(struct json_object * cat, std::vector<tagged_feedurl>& feeds) {
	const char * cat_name = NULL;
	struct json_object * cat_title_obj = NULL;
	int cat_id;

	if (cat) {
		struct json_object * cat_id_obj = json_object_object_get(cat, "id");
		cat_id = json_object_get_int(cat_id_obj);
		// ignore special categories, for now
		if(cat_id < 0) return;
		cat_title_obj = json_object_object_get(cat, "title");
		cat_name = json_object_get_string(cat_title_obj);
		LOG(LOG_DEBUG, "ttrss_api::fetch_feeds_per_category: id = %d title = %s", cat_id, cat_name);
	} else {
		// As uncategorized is a category itself (id = 0) and the default value
		// for a getFeeds is id = 0, the feeds in uncategorized will appear twice
		return;
	}

	std::map<std::string, std::string> args;
	// NOTE(review): cat is always non-NULL here (the else branch above
	// returns), so this condition is redundant but harmless.
	if (cat) args["cat_id"] = utils::to_string<int>(cat_id);
	struct json_object * feed_list_obj = run_op("getFeeds", args);
	if (!feed_list_obj) return;

	struct array_list * feed_list = json_object_get_array(feed_list_obj);
	int feed_list_size = array_list_length(feed_list);

	for (int j=0;j<feed_list_size;j++) {
		struct json_object * feed = (struct json_object *)array_list_get_idx(feed_list, j);
		int feed_id = json_object_get_int(json_object_object_get(feed, "id"));
		const char * feed_title = json_object_get_string(json_object_object_get(feed, "title"));
		const char * feed_url = json_object_get_string(json_object_object_get(feed, "feed_url"));

		std::vector<std::string> tags;
		// "~<title>" is the convention for a per-feed display-name tag.
		tags.push_back(std::string("~") + feed_title);
		if (cat_name) {
			tags.push_back(cat_name);
		}
		feeds.push_back(tagged_feedurl(utils::strprintf("%s#%d", feed_url, feed_id), tags));

		// TODO: cache feed_id -> feed_url (or feed_url -> feed_id ?)
	}

	// Release the getFeeds response tree.
	json_object_put(feed_list_obj);
}
/*
 * Returns the index of the first empty (NULL) slot in `list`.  When every
 * slot is occupied, the list length is returned (i.e. the next append
 * position).  Returns -1 when `list` itself is NULL.
 */
static INT32 tg_shared_preferences_get_first_free_slot(array_list* list)
{
	INT32 slot;
	INT32 count;

	return_val_if_fail(list,-1);

	count = array_list_length(list);
	for (slot = 0; slot < count; slot++) {
		if (array_list_get_idx(list, slot) == NULL) {
			break;
		}
	}
	return slot;
}
INT32 tg_shared_preferences_unregister_all() { INT32 i = 0; INT32 len = 0; struct array_list* list = s_shared_preferences_register_list.list; //sem_wait (&s_shared_preferences_sem); tg_os_WaitSemaphore(s_shared_preferences_sem); len = array_list_length(list); for(;i<len;i++) { array_list_put_idx(list, i, NULL); } //sem_post (&s_shared_preferences_sem); tg_os_SignalSemaphore(s_shared_preferences_sem); return SharedPreferences_SUCC; }
/*
 * Extracts the "channels" array from `obj` into a freshly malloc'd array of
 * fm_channel_t, storing the array in *channels and its size in *number.
 * Consumes (puts) `obj`.  Returns 0 on success, -1 on failure.
 */
int read_channels_json(json_object *obj, fm_channel_t **channels, int *number)
{
	int ret = -1;
	int i;
	if (obj) {
		array_list *channel_objs = json_object_get_array(json_object_object_get(obj, "channels"));
		if (channel_objs) {
			*number = array_list_length(channel_objs);
			*channels = (fm_channel_t *) malloc(*number * sizeof(fm_channel_t));
			/* FIX: malloc was dereferenced unchecked; fail cleanly instead. */
			if (*channels) {
				for (i = 0; i < *number; i++) {
					json_object *o = (json_object*) array_list_get_idx(channel_objs, i);
					int id = json_object_get_int(json_object_object_get(o, "channel_id"));
					const char *name = json_object_get_string(json_object_object_get(o, "name"));
					(*channels)[i].id = id;
					/* FIX: guard strdup(NULL) when an entry lacks "name". */
					(*channels)[i].name = strdup(name ? name : "");
				}
				ret = 0;
			}
		}
		/* Ownership of obj is taken by this function. */
		json_object_put(obj);
	}
	return ret;
}
/*
 * Thread entry point: imports every mailbox listed in status->mbox_list,
 * tracking progress via status->state and the start/end timestamps.
 * `arg` must be a struct status*.  Always returns NULL.
 */
void *import_start_thread(void *arg)
{
	struct status *status;
	struct mbox_data *data;
	int i;

	status = (struct status *) arg;

	DEBUG(1, ("[*] Importing thread started\n"));
	status->state = STATE_IMPORTING;
	status->start_time = time(NULL);

	for (i = 0; i < array_list_length(status->mbox_list); i++) {
		data = (struct mbox_data *) array_list_get_idx(status->mbox_list, i);
		// FIXME: first argument should be a TALLOC_CTX *mem_ctx!!!
		import_mailbox(data, status->local.session, data);
	}

	status->state = STATE_IMPORTED;
	status->end_time = time(NULL);
	DEBUG(1, ("[*] Importing thread stopped\n"));

	return NULL;
}
/*
 * Collects the indices of every register-list entry whose path equals
 * `path`.  On success (*idx_list, *idx_list_len) receive a freshly
 * allocated index array (caller frees) and its element count.  Returns
 * TRUE when at least one entry matched, FALSE otherwise.
 */
static BOOL tg_shared_preferences_find_register_items(const CHAR* path,INT32** idx_list,INT32* idx_list_len)
{
	INT32 i = 0;
	INT32 len = 0;
	INT32 count = 0;
	INT32* found_list = NULL;
	Shared_Preferences_Register_Item* item = NULL;
	struct array_list* list = s_shared_preferences_register_list.list;

	return_val_if_fail(path,FALSE);
	return_val_if_fail(list,FALSE);

	//sem_wait (&s_shared_preferences_sem);
	tg_os_WaitSemaphore(s_shared_preferences_sem);
	len = array_list_length(list);
	*idx_list_len = 0;
	for (; i < len; i++) {
		item = (Shared_Preferences_Register_Item*)array_list_get_idx(list, i);
		/* Skip empty or malformed slots. */
		if (item==NULL || item->path==NULL || item->keys==NULL)
			continue;
		else if (strcmp(item->path,path)==0) {
			/* Allocate lazily, sized for the worst case (every slot matches). */
			if (found_list==NULL) {
				found_list = TG_CALLOC_V2(len*sizeof(INT32));
				/* FIX: allocation failure was unchecked and would have been
				 * dereferenced below; bail out of the scan instead. */
				if (found_list==NULL)
					break;
			}
			found_list[count] = i;
			count++;
		}
	}
	//sem_post (&s_shared_preferences_sem);
	tg_os_SignalSemaphore(s_shared_preferences_sem);

	if (found_list) {
		*idx_list = found_list;
	}
	*idx_list_len = count;
	return count>0?TRUE:FALSE;
}
static INT32 tg_shared_preferences_find_lock_path(const CHAR* path) { INT32 i = 0; INT32 len = 0; struct array_list* list = s_shared_preferences_lock_list.lock_list; return_val_if_fail(path,-1); return_val_if_fail(list,-1); //sem_wait (&s_shared_preferences_sem); tg_os_WaitSemaphore(s_shared_preferences_sem); len = array_list_length(list); for (;i<len;i++) { CHAR* lock_path = (CHAR*)array_list_get_idx(list, i); if (lock_path==NULL) continue; else if (strcmp(lock_path,path)==0) break; } //sem_post (&s_shared_preferences_sem); tg_os_SignalSemaphore(s_shared_preferences_sem); return i<len?i:-1; }
void DatabaseSink::parseConfig() { json_object *rootobject; json_tokener *tokener = json_tokener_new(); enum json_tokener_error err; do { rootobject = json_tokener_parse_ex(tokener, configuration["properties"].c_str(),configuration["properties"].size()); } while ((err = json_tokener_get_error(tokener)) == json_tokener_continue); if (err != json_tokener_success) { fprintf(stderr, "Error: %s\n", json_tokener_error_desc(err)); } if (tokener->char_offset < configuration["properties"].size()) // XXX shouldn't access internal fields { //Should handle the extra data here sometime... } json_object *propobject = json_object_object_get(rootobject,"properties"); g_assert(json_object_get_type(propobject) == json_type_array); array_list *proplist = json_object_get_array(propobject); for(int i=0; i < array_list_length(proplist); i++) { json_object *idxobj = (json_object*)array_list_get_idx(proplist,i); std::string prop = json_object_get_string(idxobj); propertiesToSubscribeTo.push_back(prop); DebugOut()<<"DatabaseSink logging: "<<prop<<endl; } json_object_put(propobject); json_object_put(rootobject); }
// Populates this LoginOutputData from the login-response JSON object.
// A NULL obj is a no-op.  Missing integer fields read as 0 (json-c
// behavior); missing string fields are left untouched.
void LoginOutputData::parse(json_object* obj) {
	if (obj == NULL) return;

	// Read an integer field; json_object_get_int(NULL) yields 0 when the
	// key is absent, matching the original behavior.
	auto getInt = [obj](const char* key) -> int {
		return json_object_get_int(json_object_object_get(obj, key));
	};
	// Read a string field into `out`.  The NULL guard fixes the original's
	// std::string::assign(NULL, 0) (undefined behavior) on a missing key.
	auto getStr = [obj](const char* key, std::string& out) {
		json_object* node = json_object_object_get(obj, key);
		if (node != NULL)
			out.assign(json_object_get_string(node), json_object_get_string_len(node));
	};

	this->accountid = getInt("accountid");
	getStr("nickname", this->nickname);
	this->gender = getInt("gender");
	getStr("birthday", this->birthday);
	getStr("rank", this->rank);
	getStr("introduction", this->introduction);
	this->points = getInt("points");
	this->impactpower = getInt("impactpower");
	this->listenpower = getInt("listenpower");
	getStr("avatar", this->avatar);
	getStr("voice", this->voice);
	this->voicetime = getInt("voicetime");
	this->viptypeid = getInt("viptypeid");
	getStr("vipstarttime", this->vipstarttime);
	getStr("vipexpiretime", this->vipexpiretime);
	getStr("created", this->created);
	this->status = getInt("status");
	this->kind = getInt("kind");
	this->gold = getInt("gold");
	getStr("nativeplace", this->nativeplace);
	getStr("occupation", this->occupation);
	getStr("dialect", this->dialect);
	getStr("devicetoken", this->devicetoken);
	this->ipowerlevel = getInt("ipowerlevel");
	this->lpowerlevel = getInt("lpowerlevel");
	this->isaskpermit = getInt("isaskpermit");
	this->isagainstpermit = getInt("isagainstpermit");
	this->giftcount = getInt("giftcount");
	this->pickuppoints = getInt("pickuppoints");
	// NOTE(review): the original fetched "levelconfig" but never used the
	// value; the dead lookup is dropped here.
	this->isquickuser = getInt("isquickuser");
	this->isplisttester = getInt("isplisttester");
	getStr("clientversion", this->clientversion);
	getStr("updateurl", this->updateurl);
	getStr("updatedesc", this->updatedesc);
	this->restoredefaultplist = getInt("restoredefaultplist");

	// "bindplatformlist" is expected to be a JSON array.
	// FIX: the original called json_object_get_array(obj) on the ROOT
	// object (always NULL for a json_type_object), so the list was never
	// populated; the array must come from the "bindplatformlist" node.
	json_object* listnode = json_object_object_get(obj, "bindplatformlist");
	array_list* array = json_object_get_array(listnode);
	if (array) {
		int len = array_list_length(array);
		for (int i = 0; i < len; i++) {
			json_object* element = (json_object*)array_list_get_idx(array, i);
			if (!element) continue;
			json_type type = json_object_get_type(element);
			if (type == json_type_object) {
				//* tmp = new ();
				//tmp.parse(obj);
				//bindplatformlist.push_back(tmp);
			} else if (type == json_type_int) {
				//bindplatformlist.push_back(itoa(json_object_get_int(obj)));
			} else if (type == json_type_double) {
				//bindplatformlist.push_back(json_object_get_double(obj));
			} else if (type == json_type_string) {
				string str;
				bindplatformlist.push_back(str.assign(json_object_get_string(element), json_object_get_string_len(element)));
			}
		}
	}

	getStr("username", this->username);
	getStr("platform", this->platform);
	this->askpoints1 = getInt("askpoints1");
	this->askpoints2 = getInt("askpoints2");
	getStr("background", this->background);
	this->pushmessage = getInt("pushmessage");
	this->lbs = getInt("lbs");
	// NOTE(review): "weixinur" is read as an int even though the name
	// suggests a URL — preserved as-is; confirm against the server schema.
	this->weixinur = getInt("weixinur");
	this->activitystatus = getInt("activitystatus");
	this->shareversions = getInt("shareversions");
	this->isnewdialect = getInt("isnewdialect");
	this->vcolor = getInt("vcolor");
	this->isready = getInt("isready");
	this->tabidx = getInt("tabidx");
}
/*
 * Builds a particle system from its JSON description.  Requires "forces"
 * and "emitters" arrays plus an "initialParticles" object with a "count"
 * field.  On any malformed input the partially-built system is freed and
 * NULL is returned.
 *
 * NOTE(review): both calloc() results are used unchecked — an allocation
 * failure would surface as a NULL deref in the loops below; confirm
 * whether the project treats OOM as fatal elsewhere.
 */
PASystem * PASystemNewFromJSON(json_object * jsSystem)
{
	PASystem * sys;
	json_object * jsForces, * jsInitialParticles, *jsPartCount, *jsEmitters;
	array_list * forces, * emitters;

	sys = PASystemNew();

	/* "forces" must exist and be an array. */
	if(!((jsForces = json_object_object_get(jsSystem, "forces")) && (forces = json_object_get_array(jsForces)))) {
		malformedFileError("forces");
		PASystemFree(sys);
		return NULL;
	}

	/* "emitters" must exist and be an array. */
	if(!((jsEmitters = json_object_object_get(jsSystem, "emitters")) && (emitters = json_object_get_array(jsEmitters)))) {
		malformedFileError("emitters");
		PASystemFree(sys);
		return NULL;
	}

	if(!(jsInitialParticles = json_object_object_get(jsSystem, "initialParticles"))) {
		malformedFileError("initialParticles");
		PASystemFree(sys);
		return NULL;
	}

	if(!(jsPartCount = json_object_object_get(jsInitialParticles, "count"))) {
		malformedFileError("initialParticles->count");
		PASystemFree(sys);
		return NULL;
	}

	sys->initialParticleCount = json_object_get_double(jsPartCount);

	sys->forceCount = array_list_length(forces);
	sys->forces = (PAPhysicsForce **)calloc(sys->forceCount, sizeof(PAPhysicsForce *));

	sys->emitterCount = array_list_length(emitters);
	sys->emitters = (PAEmitter **)calloc(sys->emitterCount, sizeof(PAEmitter *));

	/* Deserialize each force; a NULL array slot means a malformed file. */
	for(int i = 0; i < sys->forceCount; i++) {
		json_object * jsForce;
		if(!(jsForce = (json_object *)array_list_get_idx(forces, i))) {
			malformedFileError("force");
			PASystemFree(sys);
			return NULL;
		}
		sys->forces[i] = PAPhysicsForceNewFromJSON(jsForce);
	}

	/* Likewise for emitters. */
	for(int i = 0; i < sys->emitterCount; i++) {
		json_object * jsEmitter;
		if(!(jsEmitter = (json_object *)array_list_get_idx(emitters, i))) {
			malformedFileError("emitter");
			PASystemFree(sys);
			return NULL;
		}
		sys->emitters[i] = PAEmitterNewFromJSON(jsEmitter);
	}

	return sys;
}
void LocalePrefsHandler::readLocaleFile() { // Read the locale file char* jsonStr = Utils::readFile(s_custLocaleFile); if (!jsonStr) jsonStr = Utils::readFile(s_defaultLocaleFile); if (!jsonStr) { //luna_critical(s_logChannel, "Failed to load locale files: [%s] nor [%s]", s_custLocaleFile,s_defaultLocaleFile); qCritical() << "Failed to load locale files: [" << s_custLocaleFile << "] nor [" << s_defaultLocaleFile << "]"; return; } json_object* root = 0; json_object* label = 0; array_list* localeArray = 0; root = json_tokener_parse(jsonStr); if (!root || is_error(root)) { //luna_critical(s_logChannel, "Failed to parse locale file contents into json"); qCritical() << "Failed to parse locale file contents into json"; goto Done; } label = json_object_object_get(root, "locale"); if (!label || is_error(label)) { //luna_critical(s_logChannel, "Failed to get locale entry from locale file"); qCritical() << "Failed to get locale entry from locale file"; goto Done; } localeArray = json_object_get_array(label); if (!localeArray) { //luna_critical(s_logChannel, "Failed to get locale array from locale file"); qCritical() << "Failed to get locale array from locale file"; goto Done; } for (int i = 0; i < array_list_length(localeArray); i++) { json_object* obj = (json_object*) array_list_get_idx(localeArray, i); LocaleEntry localeEntry; array_list* countryArray = 0; label = json_object_object_get(obj, "languageName"); if (!label || is_error(label)) continue; localeEntry.language.first = json_object_get_string(label); label = json_object_object_get(obj, "languageCode"); if (!label || is_error(label)) continue; localeEntry.language.second = json_object_get_string(label); label = json_object_object_get(obj, "countries"); if (!label || is_error(label)) continue; countryArray = json_object_get_array(label); for (int j = 0; j < array_list_length(countryArray); j++) { json_object* countryObj = (json_object*) array_list_get_idx(countryArray, j); NameCodePair country; label = 
json_object_object_get(countryObj, "countryName"); if (!label || is_error(label)) continue; country.first = json_object_get_string(label); label = json_object_object_get(countryObj, "countryCode"); if (!label || is_error(label)) continue; country.second = json_object_get_string(label); localeEntry.countries.push_back(country); } m_localeEntryList.push_back(localeEntry); } Done: if (root && !is_error(root)) json_object_put(root); delete [] jsonStr; }
// Loads the region definition file (customization path first, then the
// default), parses the "region" JSON array and fills m_regionEntryList.
// Per entry: region[1] = countryName, region[0] = shortCountryName
// (falling back to countryName), region[2] = countryCode.  Entries
// missing countryName or countryCode are skipped.
void LocalePrefsHandler::readRegionFile()
{
	// Read the locale file
	char* jsonStr = Utils::readFile(s_custRegionFile);
	if (!jsonStr)
		jsonStr = Utils::readFile(s_defaultRegionFile);
	if (!jsonStr) {
		//luna_critical(s_logChannel, "Failed to load region files: [%s] nor [%s]", s_custRegionFile,s_defaultRegionFile);
		qCritical() << "Failed to load region files: [" << s_custRegionFile << "] nor [" << s_defaultRegionFile << "]";
		return;
	}

	json_object* root = 0;
	json_object* label = 0;
	array_list* regionArray = 0;

	root = json_tokener_parse(jsonStr);
	if (!root || is_error(root)) {
		//luna_critical(s_logChannel, "Failed to parse region file contents into json");
		qCritical() << "Failed to parse region file contents into json";
		goto Done;
	}

	label = json_object_object_get(root, "region");
	if (!label || is_error(label)) {
		//luna_critical(s_logChannel, "Failed to get region entry from region file");
		qCritical() << "Failed to get region entry from region file";
		goto Done;
	}

	regionArray = json_object_get_array(label);
	if (!regionArray) {
		//luna_critical(s_logChannel, "Failed to get region array from region file");
		qCritical() << "Failed to get region array from region file";
		goto Done;
	}

	for (int i = 0; i < array_list_length(regionArray); i++) {
		json_object* obj = (json_object*) array_list_get_idx(regionArray, i);
		RegionEntry regionEntry;

		label = json_object_object_get(obj, "countryName");
		if (!label || is_error(label))
			continue;
		regionEntry.region[1] = std::string(json_object_get_string(label));

		// shortCountryName is optional: fall back to the full country name.
		label = json_object_object_get(obj, "shortCountryName");
		if (!label || is_error(label))
			regionEntry.region[0] = regionEntry.region[1];
		else
			regionEntry.region[0] = std::string(json_object_get_string(label));

		label = json_object_object_get(obj, "countryCode");
		if (!label || is_error(label))
			continue;
		regionEntry.region[2] = std::string(json_object_get_string(label));

		m_regionEntryList.push_back(regionEntry);
	}

Done:
	if (root && !is_error(root))
		json_object_put(root);

	delete [] jsonStr;
}
/*
 * Test driver: loads a JSON test-description file (whose single top-level
 * key must be "tests", an array of test objects), extracts each test's
 * settings into a context and runs it via run_test().
 *
 * NOTE(review): this walks json-c's internal lh_entry linked list directly
 * (->head / ->next / ->k / ->v) rather than using accessor macros — it is
 * tied to the json-c version it was written against.
 */
int main(int argc, char **argv)
{
	struct json_object *json;
	struct array_list *tests;
	struct lh_entry *entry;
	char *key;
	struct json_object *val;
	int i;
	context ctx;

	if (argc != 2) {
		printf("Usage: %s <filename>\n", argv[0]);
		return 1;
	}

	json = json_object_from_file(argv[1]);
	assert(!is_error(json));
	/* The file's single top-level key must be "tests". */
	assert(strcmp((char *) ((json_object_get_object(json)->head)->k), "tests") == 0);

	/* Get array of tests */
	tests = json_object_get_array((struct json_object *) (json_object_get_object(json)->head)->v);

	for (i = 0; i < array_list_length(tests); i++) {
		/* Get test */
		struct json_object *test = (struct json_object *) array_list_get_idx(tests, i);

		/* Reset optional settings to their defaults for each test. */
		ctx.last_start_tag = NULL;
		ctx.content_model = NULL;
		ctx.process_cdata = false;

		/* Extract settings */
		for (entry = json_object_get_object(test)->head; entry; entry = entry->next) {
			key = (char *) entry->k;
			val = (struct json_object *) entry->v;

			if (strcmp(key, "description") == 0) {
				printf("Test: %s\n", json_object_get_string(val));
			} else if (strcmp(key, "input") == 0) {
				ctx.input = (const uint8_t *) json_object_get_string(val);
				ctx.input_len = json_object_get_string_len(val);
			} else if (strcmp(key, "output") == 0) {
				ctx.output = json_object_get_array(val);
				ctx.output_index = 0;
				ctx.char_off = 0;
			} else if (strcmp(key, "lastStartTag") == 0) {
				ctx.last_start_tag = (const char *) json_object_get_string(val);
			} else if (strcmp(key, "contentModelFlags") == 0) {
				ctx.content_model = json_object_get_array(val);
			} else if (strcmp(key, "processCDATA") == 0) {
				ctx.process_cdata = json_object_get_boolean(val);
			}
		}

		/* And run the test */
		run_test(&ctx);
	}

	json_object_put(json);

	printf("PASS\n");

	return 0;
}
int OGRGMELayer::FetchDescribe() { CPLString osRequest = "tables/" + osTableId; CPLHTTPResult *psDescribe = poDS->MakeRequest(osRequest); if (psDescribe == NULL) return FALSE; CPLDebug("GME", "table doc = %s\n", psDescribe->pabyData); json_object *table_doc = OGRGMEParseJSON((const char *) psDescribe->pabyData); CPLHTTPDestroyResult(psDescribe); osTableName = OGRGMEGetJSONString(table_doc, "name"); poFeatureDefn = new OGRFeatureDefn(osTableName); poFeatureDefn->Reference(); json_object *schema_doc = json_object_object_get(table_doc, "schema"); json_object *columns_doc = json_object_object_get(schema_doc, "columns"); array_list *column_list = json_object_get_array(columns_doc); CPLString osLastGeomColumn; int field_count = array_list_length(column_list); for( int i = 0; i < field_count; i++ ) { OGRwkbGeometryType eFieldGeomType = wkbNone; json_object *field_obj = (json_object*) array_list_get_idx(column_list, i); const char* name = OGRGMEGetJSONString(field_obj, "name"); OGRFieldDefn oFieldDefn(name, OFTString); const char *type = OGRGMEGetJSONString(field_obj, "type"); if (EQUAL(type, "integer")) oFieldDefn.SetType(OFTInteger); else if (EQUAL(type, "double")) oFieldDefn.SetType(OFTReal); else if (EQUAL(type, "boolean")) oFieldDefn.SetType(OFTInteger); else if (EQUAL(type, "string")) oFieldDefn.SetType(OFTString); else if (EQUAL(type, "string")) { if (EQUAL(name, "gx_id")) { iGxIdField = i; } oFieldDefn.SetType(OFTString); } else if (EQUAL(type, "points")) eFieldGeomType = wkbPoint; else if (EQUAL(type, "linestrings")) eFieldGeomType = wkbLineString; else if (EQUAL(type, "polygons")) eFieldGeomType = wkbPolygon; else if (EQUAL(type, "mixedGeometry")) eFieldGeomType = wkbGeometryCollection; if (eFieldGeomType == wkbNone) { poFeatureDefn->AddFieldDefn(&oFieldDefn); } else { CPLAssert(EQUAL(osGeomColumnName,"")); osGeomColumnName = oFieldDefn.GetNameRef(); poFeatureDefn->SetGeomType(eFieldGeomType); poFeatureDefn->GetGeomFieldDefn(0)->SetSpatialRef(poSRS); } } 
json_object_put(table_doc); return TRUE; }
/**
 * Run a single tokeniser test, once for each content model the test
 * specifies (or just once, in PCDATA, when none is given).
 *
 * \param ctx  Test context describing the input and the expected output.
 */
void run_test(context *ctx)
{
	parserutils_inputstream *stream;
	hubbub_tokeniser *tok;
	hubbub_tokeniser_optparams params;
	int i, max_i;
	struct array_list *outputsave = ctx->output;

	if (ctx->content_model == NULL) {
		max_i = 1;
	} else {
		max_i = array_list_length(ctx->content_model);
	}

	/* We test for each of the content models specified */
	for (i = 0; i < max_i; i++) {
		/* Reset expected output */
		ctx->output = outputsave;
		ctx->output_index = 0;
		ctx->char_off = 0;

		assert(parserutils_inputstream_create("UTF-8", 0, NULL,
				&stream) == PARSERUTILS_OK);

		assert(hubbub_tokeniser_create(stream, &tok) == HUBBUB_OK);

		if (ctx->last_start_tag != NULL) {
			/* Fake up a start tag, in PCDATA state */
			size_t len = strlen(ctx->last_start_tag) + 3;
			uint8_t *buf = malloc(len);

			assert(buf != NULL);

			snprintf((char *) buf, len, "<%s>",
					ctx->last_start_tag);

			assert(parserutils_inputstream_append(stream,
					buf, len - 1) == PARSERUTILS_OK);

			assert(hubbub_tokeniser_run(tok) == HUBBUB_OK);

			free(buf);
		}

		if (ctx->process_cdata) {
			params.process_cdata = ctx->process_cdata;
			/* BUG FIX: "&params" had been mangled into a
			 * pilcrow ("&para;ms") by an encoding round-trip,
			 * which does not compile. */
			assert(hubbub_tokeniser_setopt(tok,
					HUBBUB_TOKENISER_PROCESS_CDATA,
					&params) == HUBBUB_OK);
		}

		params.token_handler.handler = token_handler;
		params.token_handler.pw = ctx;
		assert(hubbub_tokeniser_setopt(tok,
				HUBBUB_TOKENISER_TOKEN_HANDLER,
				&params) == HUBBUB_OK);

		if (ctx->content_model == NULL) {
			params.content_model.model =
					HUBBUB_CONTENT_MODEL_PCDATA;
		} else {
			const char *cm = json_object_get_string(
					(struct json_object *)
					array_list_get_idx(
						ctx->content_model, i));

			if (strcmp(cm, "PCDATA") == 0) {
				params.content_model.model =
						HUBBUB_CONTENT_MODEL_PCDATA;
			} else if (strcmp(cm, "RCDATA") == 0) {
				params.content_model.model =
						HUBBUB_CONTENT_MODEL_RCDATA;
			} else if (strcmp(cm, "CDATA") == 0) {
				params.content_model.model =
						HUBBUB_CONTENT_MODEL_CDATA;
			} else {
				params.content_model.model =
						HUBBUB_CONTENT_MODEL_PLAINTEXT;
			}
		}
		assert(hubbub_tokeniser_setopt(tok,
				HUBBUB_TOKENISER_CONTENT_MODEL,
				&params) == HUBBUB_OK);

		assert(parserutils_inputstream_append(stream, ctx->input,
				ctx->input_len) == PARSERUTILS_OK);

		/* Signal end-of-input */
		assert(parserutils_inputstream_append(stream, NULL, 0) ==
				PARSERUTILS_OK);

		printf("Input: '%.*s' (%d)\n", (int) ctx->input_len,
				(const char *) ctx->input,
				(int) ctx->input_len);

		assert(hubbub_tokeniser_run(tok) == HUBBUB_OK);

		hubbub_tokeniser_destroy(tok);
		parserutils_inputstream_destroy(stream);
	}
}
hubbub_error token_handler(const hubbub_token *token, void *pw) { static const char *token_names[] = { "DOCTYPE", "StartTag", "EndTag", "Comment", "Character", "EOF" }; size_t i; context *ctx = (context *) pw; struct json_object *obj = NULL; struct array_list *items; for (; ctx->output_index < array_list_length(ctx->output); ctx->output_index++) { /* Get object for index */ obj = (struct json_object *) array_list_get_idx(ctx->output, ctx->output_index); /* If it's not a string, we've found the expected output */ if (json_object_get_type(obj) != json_type_string) break; /* Otherwise, it must be a parse error */ assert(strcmp(json_object_get_string(obj), "ParseError") == 0); } /* If we've run off the end, this is an error -- the tokeniser has * produced more tokens than expected. We allow for the generation * of a terminating EOF token, however. */ assert("too many tokens" && (ctx->output_index < array_list_length(ctx->output) || token->type == HUBBUB_TOKEN_EOF)); /* Got a terminating EOF -- no error */ if (ctx->output_index >= array_list_length(ctx->output)) return HUBBUB_OK; /* Now increment the output index so we don't re-expect this token */ ctx->output_index++; /* Expected output must be an array */ assert(json_object_get_type(obj) == json_type_array); items = json_object_get_array(obj); printf("got %s: expected %s\n", token_names[token->type], json_object_get_string((struct json_object *) array_list_get_idx(items, 0))); /* Make sure we got the token we expected */ assert(strcmp(token_names[token->type], json_object_get_string((struct json_object *) array_list_get_idx(items, 0))) == 0); switch (token->type) { case HUBBUB_TOKEN_DOCTYPE: { const char *expname = json_object_get_string( array_list_get_idx(items, 1)); const char *exppub = json_object_get_string( array_list_get_idx(items, 2)); const char *expsys = json_object_get_string( array_list_get_idx(items, 3)); bool expquirks = !json_object_get_boolean( array_list_get_idx(items, 4)); const char *gotname = 
(const char *)token->data.doctype.name.ptr; const char *gotpub, *gotsys; printf("'%.*s' %sids:\n", (int) token->data.doctype.name.len, gotname, token->data.doctype.force_quirks ? "(force-quirks) " : ""); if (token->data.doctype.public_missing) { gotpub = NULL; printf("\tpublic: missing\n"); } else { gotpub = (const char *) token->data.doctype.public_id.ptr; printf("\tpublic: '%.*s' (%d)\n", (int) token->data.doctype.public_id.len, gotpub, (int) token->data.doctype.public_id.len); } if (token->data.doctype.system_missing) { gotsys = NULL; printf("\tsystem: missing\n"); } else { gotsys = (const char *) token->data.doctype.system_id.ptr; printf("\tsystem: '%.*s' (%d)\n", (int) token->data.doctype.system_id.len, gotsys, (int) token->data.doctype.system_id.len); } assert(token->data.doctype.name.len == strlen(expname)); assert(strncmp(gotname, expname, strlen(expname)) == 0); assert((exppub == NULL) == (token->data.doctype.public_missing == true)); if (exppub) { assert(token->data.doctype.public_id.len == strlen(exppub)); assert(strncmp(gotpub, exppub, strlen(exppub)) == 0); } assert((expsys == NULL) == (token->data.doctype.system_missing == true)); if (gotsys) { assert(token->data.doctype.system_id.len == strlen(expsys)); assert(strncmp(gotsys, expsys, strlen(expsys)) == 0); } assert(expquirks == token->data.doctype.force_quirks); } break; case HUBBUB_TOKEN_START_TAG: { const char *expname = json_object_get_string( array_list_get_idx(items, 1)); struct lh_entry *expattrs = json_object_get_object( array_list_get_idx(items, 2))->head; bool self_closing = json_object_get_boolean( array_list_get_idx(items, 3)); const char *tagname = (const char *) token->data.tag.name.ptr; printf("expected: '%s' %s\n", expname, (self_closing) ? "(self-closing) " : ""); printf(" got: '%.*s' %s\n", (int) token->data.tag.name.len, tagname, (token->data.tag.self_closing) ? 
"(self-closing) " : ""); if (token->data.tag.n_attributes > 0) { printf("attributes:\n"); } assert(token->data.tag.name.len == strlen(expname)); assert(strncmp(tagname, expname, strlen(expname)) == 0); assert((token->data.tag.n_attributes == 0) == (expattrs == NULL)); assert(self_closing == token->data.tag.self_closing); for (i = 0; i < token->data.tag.n_attributes; i++) { char *expname = (char *) expattrs->k; const char *expval = json_object_get_string( (struct json_object *) expattrs->v); const char *gotname = (const char *) token->data.tag.attributes[i].name.ptr; size_t namelen = token->data.tag.attributes[i].name.len; const char *gotval = (const char *) token->data.tag.attributes[i].value.ptr; size_t vallen = token->data.tag.attributes[i].value.len; printf("\t'%.*s' = '%.*s'\n", (int) namelen, gotname, (int) vallen, gotval); assert(namelen == strlen(expname)); assert(strncmp(gotname, expname, strlen(expname)) == 0); assert(vallen == strlen(expval)); assert(strncmp(gotval, expval, strlen(expval)) == 0); expattrs = expattrs->next; } assert(expattrs == NULL); } break; case HUBBUB_TOKEN_END_TAG: { const char *expname = json_object_get_string( array_list_get_idx(items, 1)); const char *tagname = (const char *) token->data.tag.name.ptr; printf("'%.*s' %s\n", (int) token->data.tag.name.len, tagname, (token->data.tag.n_attributes > 0) ? 
"attributes:" : ""); assert(token->data.tag.name.len == strlen(expname)); assert(strncmp(tagname, expname, strlen(expname)) == 0); } break; case HUBBUB_TOKEN_COMMENT: { const char *expstr = json_object_get_string( array_list_get_idx(items, 1)); const char *gotstr = (const char *) token->data.comment.ptr; printf("expected: '%s'\n", expstr); printf(" got: '%.*s'\n", (int) token->data.comment.len, gotstr); assert(token->data.comment.len == strlen(expstr)); assert(strncmp(gotstr, expstr, strlen(expstr)) == 0); } break; case HUBBUB_TOKEN_CHARACTER: { int expstrlen = json_object_get_string_len( array_list_get_idx(items, 1)); const char *expstr =json_object_get_string( array_list_get_idx(items, 1)); const char *gotstr = (const char *) token->data.character.ptr; size_t len = min(token->data.character.len, expstrlen - ctx->char_off); printf("expected: '%.*s'\n", (int) len, expstr + ctx->char_off); printf(" got: '%.*s'\n", (int) token->data.character.len, gotstr); assert(memcmp(gotstr, expstr + ctx->char_off, len) == 0); if (len < token->data.character.len) { /* Expected token only contained part of the data * Calculate how much is left, then try again with * the next expected token */ hubbub_token t; t.type = HUBBUB_TOKEN_CHARACTER; t.data.character.ptr += len; t.data.character.len -= len; ctx->char_off = 0; token_handler(&t, pw); } else if (strlen(expstr + ctx->char_off) > token->data.character.len) { /* Tokeniser output only contained part of the data * in the expected token; calculate the offset into * the token and process the remainder next time */ ctx->char_off += len; ctx->output_index--; } else { /* Exact match - clear offset */ ctx->char_off = 0; } } break; case HUBBUB_TOKEN_EOF: printf("\n"); break; } return HUBBUB_OK; }
int CJSONArrayIterator::getSize() const { return m_array == 0 ? 0 : array_list_length(m_array); }
/**
 * @brief Parse a configuration JSON document (e.g. default.conf) and fill
 *        the expr::ConfData singleton with the values found.
 *
 * Expected layout:
 *   { "conf": { "log_port": int, "control_port": int,
 *               "log_priority": string, "export_period": int,
 *               "max_log_buffer_size": int,
 *               "mapping": [ { "process_log_id": ..., "log_file_id": ...,
 *                              "log_file_path": ... }, ... ] } }
 *
 * Every problem found is recorded in expr::ErrorMessagePool and marks the
 * result as "not parsed"; parsing continues so all errors are reported.
 *
 * @param json_conf configuration document as a JSON string.
 * @return the ConfData singleton (its "parsed" flag set accordingly), or
 *         NULL when the document is not valid JSON at all.
 */
expr::ConfData* expr::ConfigManager::parse_conf_json(std::string json_conf)
{
    // json_tokener_parse() only reads the buffer, so the string's own
    // storage suffices -- no g_strdup() copy needed.
    json_object* jobj = json_tokener_parse(json_conf.c_str());
    if((jobj == NULL) || is_error(jobj)) {
        return NULL;
    }

    bool parsed = true;

    // Record one configuration error and mark the whole parse as failed.
    auto report_error = [&parsed](const std::string& desc) {
        std::shared_ptr<expr::ErrorMessage> error_input_param =
            std::make_shared<expr::ErrorMessage>(expr::CONFIG, expr::CONFIG_PARSE);
        error_input_param->set_error_desc(desc);
        expr::ErrorMessagePool::getInstance()->add(error_input_param);
        parsed = false;
    };

    json_object* confjobj = NULL;
    json_object* subjobj = NULL;

    if(!json_object_object_get_ex(jobj, "conf", &confjobj)) {
        report_error("cannot find conf");
    } else {
        // log_port and control_port are optional: silently skipped if absent.
        if(json_object_object_get_ex(confjobj, "log_port", &subjobj)) {
            expr::ConfData::getInstance()->set_log_port(json_object_get_int(subjobj));
        }
        if(json_object_object_get_ex(confjobj, "control_port", &subjobj)) {
            expr::ConfData::getInstance()->set_ctl_port(json_object_get_int(subjobj));
        }

        if(!json_object_object_get_ex(confjobj, "log_priority", &subjobj)) {
            report_error("cannot find log_priority");
        } else {
            std::string priority(json_object_get_string(subjobj));
            expr::PRIORITY level = this->get_priority(priority);
            if(level == NOT_DEFINED) {
                report_error("invalid Priority");
            } else {
                expr::ConfData::getInstance()->set_log_priority(level);
            }
        }

        if(!json_object_object_get_ex(confjobj, "export_period", &subjobj)) {
            report_error("cannot find export_period");
        } else {
            expr::ConfData::getInstance()->set_export_period(json_object_get_int(subjobj));
        }

        if(!json_object_object_get_ex(confjobj, "max_log_buffer_size", &subjobj)) {
            report_error("cannot find max_log_buffer_size");
        } else {
            expr::ConfData::getInstance()->set_max_log_buffer_size(json_object_get_int(subjobj));
        }

        if(!json_object_object_get_ex(confjobj, "mapping", &subjobj)) {
            report_error("cannot find mapping");
        } else {
            struct array_list* arr_obj = json_object_get_array(subjobj);
            if(arr_obj == NULL) {
                // BUG FIX: the old code reported this error but then still
                // called array_list_length(NULL), crashing on a malformed
                // "mapping" entry. Now we skip the table walk entirely.
                report_error("mapping list error");
            } else {
                int arr_size = array_list_length(arr_obj);
                if(arr_size < 1) {
                    report_error("please set mapping table");
                }

                for(int arr_index = 0; arr_index < arr_size; arr_index++) {
                    json_object* arr_item =
                        static_cast<json_object*>(array_list_get_idx(arr_obj, arr_index));

                    std::string ps_log_id;
                    std::string log_d_id;
                    std::string log_F_path;

                    // Walk the key/value pairs of this mapping entry.
                    for(struct lh_entry* entry = json_object_get_object(arr_item)->head;
                            entry != NULL; entry = entry->next) {
                        std::string key(static_cast<const char*>(entry->k));
                        std::string value(json_object_get_string((struct json_object*)entry->v));

                        if(key.compare("process_log_id") == 0) {
                            ps_log_id = value;
                        } else if(key.compare("log_file_id") == 0) {
                            log_d_id = value;
                        } else if(key.compare("log_file_path") == 0) {
                            log_F_path = value;
                        } else {
                            report_error("Invalid key: " + key);
                        }
                    }

                    if(ps_log_id.empty()) {
                        report_error("process_log_id key is NOT found at array[" + std::to_string(arr_index) + "]");
                    }
                    if(log_d_id.empty()) {
                        report_error("log_file_id key is NOT found at array[" + std::to_string(arr_index) + "]");
                    }
                    if(log_F_path.empty()) {
                        report_error("log_file_path key is NOT found at array[" + std::to_string(arr_index) + "]");
                    }

                    // Register the mapping only when the entry was complete.
                    if((ps_log_id.empty() == false) && (log_d_id.empty() == false)
                            && (log_F_path.empty() == false)) {
                        expr::ConfData::getInstance()->add_to_table(ps_log_id, log_d_id, log_F_path);
                    }
                } //for
            }
        }
    }

    json_object_put(jobj);

    expr::ConfData::getInstance()->set_parsed(parsed);
    return expr::ConfData::getInstance();
}
static void ac_update_configuration(struct json_object* jsonroot) { int i; int mtu; int length; const char* bridge; struct json_object* jsonelement; struct ac_if_datachannel* datachannel; ASSERT(jsonroot != NULL); /* Params { DataChannelInterfaces: [ { Index: [int], MTU: [int], Bridge: [string] } ] } */ /* DataChannelInterfaces */ jsonelement = compat_json_object_object_get(jsonroot, "DataChannelInterfaces"); if (jsonelement && (json_object_get_type(jsonelement) == json_type_array)) { struct array_list* interfaces = json_object_get_array(jsonelement); capwap_rwlock_wrlock(&g_ac.ifdatachannellock); /* Update and Remove active interfaces*/ capwap_hash_foreach(g_ac.ifdatachannel, ac_update_configuration_datachannelinterfaces, interfaces); /* Add new interfaces*/ length = array_list_length(interfaces); for (i = 0; i < length; i++) { struct json_object* jsonvalue = array_list_get_idx(interfaces, i); if (jsonvalue && (json_object_get_type(jsonvalue) == json_type_object)) { struct json_object* jsonindex = compat_json_object_object_get(jsonvalue, "Index"); if (jsonindex && (json_object_get_type(jsonindex) == json_type_int)) { int index = json_object_get_int(jsonindex); if ((index >= 0) && (index < AC_IFACE_MAX_INDEX) && !ac_update_configuration_getdatachannel_params(jsonvalue, &mtu, &bridge)) { datachannel = (struct ac_if_datachannel*)capwap_alloc(sizeof(struct ac_if_datachannel)); memset(datachannel, 0, sizeof(struct ac_if_datachannel)); /* */ datachannel->index = (unsigned long)index; datachannel->mtu = mtu; if (bridge && (strlen(bridge) < IFNAMSIZ)) { strcpy(datachannel->bridge, bridge); } /* */ if (!ac_update_configuration_create_datachannelinterfaces(datachannel)) { capwap_hash_add(g_ac.ifdatachannel, (void*)datachannel); } else { capwap_free(datachannel); } } } } } capwap_rwlock_unlock(&g_ac.ifdatachannellock); } }
void CANGenPlugin::dataReceived(libwebsocket* socket, const char* data, size_t len) { if(!data || len == 0) return; //TODO: refactor ? copied from websocketsink std::unique_ptr<json_object, decltype(&json_object_put)> rootobject(nullptr, &json_object_put); std::unique_ptr<json_tokener, decltype(&json_tokener_free)> tokener(json_tokener_new(), &json_tokener_free); enum json_tokener_error err; do { std::unique_ptr<json_object, decltype(&json_object_put)> tmpobject(json_tokener_parse_ex(tokener.get(), data, len), &json_object_put); rootobject.swap(tmpobject); } while ((err = json_tokener_get_error(tokener.get())) == json_tokener_continue); if (err != json_tokener_success) { LOG_ERROR("Error: " << json_tokener_error_desc(err) << std::endl); return; } if(!rootobject) { LOG_ERROR("Failed to parse json: " << data << std::endl); return; } if (tokener->char_offset < len) // XXX shouldn't access internal fields { // Handle extra characters after parsed object as desired. // e.g. issue an error, parse another object from that point, etc... } // Success, use jobj here. json_object *typeobject = json_object_object_get(rootobject.get(),"type"); json_object *nameobject = json_object_object_get(rootobject.get(),"name"); json_object *transidobject = json_object_object_get(rootobject.get(),"transactionid"); if(!typeobject || !nameobject || !transidobject) { DebugOut(DebugOut::Warning)<<"Malformed json. 
aborting"<<endl; return; } string type = string(json_object_get_string(typeobject)); string name = string(json_object_get_string(nameobject)); string id; if (json_object_get_type(transidobject) == json_type_string) { id = string(json_object_get_string(transidobject)); } else { stringstream strstr; strstr << json_object_get_int(transidobject); id = strstr.str(); } if (type == "method") { vector<string> propertyNames; list< std::tuple<string, string, string, Zone::Type, string> > propertyData; json_object *dataobject = json_object_object_get(rootobject.get(),"data"); if (json_object_get_type(dataobject) == json_type_array) { array_list *arraylist = json_object_get_array(dataobject); for (int i=0;i<array_list_length(arraylist);i++) { json_object *arrayobject = (json_object*)array_list_get_idx(arraylist,i); if (json_object_get_type(arrayobject) == json_type_object) { json_object *interfaceobject = json_object_object_get(arrayobject,"interface"); json_object *propobject = json_object_object_get(arrayobject,"property"); json_object *valueobject = json_object_object_get(arrayobject,"value"); json_object *zoneobject = json_object_object_get(arrayobject,"zone"); json_object *sourceobject = json_object_object_get(arrayobject,"source"); string interfacestr = string(interfaceobject ? json_object_get_string(interfaceobject) : "vcan0"); string keystr = string(propobject ? json_object_get_string(propobject) : ""); string valuestr = string(valueobject ? json_object_get_string(valueobject): ""); string sourcestr = string(sourceobject ? json_object_get_string(sourceobject): ""); Zone::Type z(Zone::None); if(zoneobject){ try { z = static_cast<Zone::Type>(boost::lexical_cast<int,std::string>(json_object_get_string(zoneobject))); } catch (...) 
{ } } propertyData.push_back(make_tuple(interfacestr, keystr, valuestr, z, sourcestr)); } else if (json_object_get_type(arrayobject) == json_type_string) { string propertyName = string(json_object_get_string(arrayobject)); propertyNames.push_back(propertyName); } } //array_list_free(arraylist); } else { string path = json_object_get_string(dataobject); if (path != "") { propertyNames.push_back(path); } } if (type == "method") { if (name == "get") { if (!propertyNames.empty()) { //GetProperty is going to be a singleshot sink. getValue(socket,propertyNames.front(),Zone::None,id); } else if (!propertyData.empty()) { //GetProperty is going to be a singleshot sink. auto prop = propertyData.front(); getValue(socket,std::get<1>(prop),std::get<3>(prop),id); } else { LOG_WARNING(" \"get\" method called with no data! Transaction ID:" << id); } } else if (name == "set") { if (!propertyNames.empty()) { //Should not happen } else if (!propertyData.empty()) { auto prop = propertyData.begin(); for (auto prop = propertyData.begin(); prop != propertyData.end(); ++prop) { LOG_MESSAGE("websocketsinkmanager setting " << std::get<1>(*prop) << " to " << std::get<2>(*prop) << " in zone " << std::get<3>(*prop)); setValue(socket,std::get<1>(*prop),std::get<2>(*prop),std::get<3>(*prop),std::get<0>(*prop), id); } } } else if (name == "getSupportedEventTypes") { //If data.front() dosen't contain a property name, return a list of properties supported. //if it does, then return the event types that particular property supports. 
string typessupported = ""; if (propertyNames.empty()) { //Send what properties we support PropertyList foo(routingEngine->supported()); PropertyList::const_iterator i=foo.cbegin(); while (i != foo.cend()) { if(i==foo.cbegin()) typessupported.append("\"").append((*i)).append("\""); else typessupported.append(",\"").append((*i)).append("\""); ++i; } } else { //Send what events a particular property supports PropertyList foo(routingEngine->supported()); if (ListPlusPlus<VehicleProperty::Property>(&foo).contains(propertyNames.front())) { //sinkManager->addSingleShotSink(wsi,data.front(),id); typessupported = "\"get\",\"getSupportedEventTypes\""; } } stringstream s; string s2; s << "{\"type\":\"methodReply\",\"name\":\"getSupportedEventTypes\",\"data\":[" << typessupported << "],\"transactionid\":\"" << id << "\"}"; string replystr = s.str(); LOG_INFO(" JSON Reply: " << replystr); WebSockets::Write(socket, replystr); } else { DebugOut(0)<<"Unknown method called."<<endl; } } } }
boolean CJSONArrayIterator::isEnd() { return !(m_array != 0 && m_nCurItem < array_list_length(m_array)); }
rsspp::feed newsblur_api::fetch_feed(const std::string& id) { rsspp::feed f = known_feeds[id]; LOG(LOG_INFO, "newsblur_api::fetch_feed: about to fetch %u pages of feed %s", min_pages, id.c_str()); for(unsigned int i = 1; i <= min_pages; i++) { std::string page = utils::to_string(i); json_object * query_result = query_api("/reader/feed/" + id + "?page=" + page, NULL); if (!query_result) return f; json_object * stories = json_object_object_get(query_result, "stories"); if (!stories) { LOG(LOG_ERROR, "newsblur_api::fetch_feed: request returned no stories"); return f; } if (json_object_get_type(stories) != json_type_array) { LOG(LOG_ERROR, "newsblur_api::fetch_feed: content is not an array"); return f; } struct array_list * items = json_object_get_array(stories); int items_size = array_list_length(items); LOG(LOG_DEBUG, "newsblur_api::fetch_feed: %d items", items_size); for (int i = 0; i < items_size; i++) { struct json_object * item_obj = (struct json_object *)array_list_get_idx(items, i); const char * article_id = json_object_get_string(json_object_object_get(item_obj, "id")); const char * title = json_object_get_string(json_object_object_get(item_obj, "story_title")); const char * link = json_object_get_string(json_object_object_get(item_obj, "story_permalink")); const char * content = json_object_get_string(json_object_object_get(item_obj, "story_content")); const char * pub_date = json_object_get_string(json_object_object_get(item_obj, "story_date")); bool read_status = json_object_get_int(json_object_object_get(item_obj, "read_status")); rsspp::item item; if (title) item.title = title; if (link) item.link = link; if (content) item.content_encoded = content; item.guid = id + ID_SEPARATOR + article_id; if (read_status == 0) { item.labels.push_back("newsblur:unread"); } else if (read_status == 1) { item.labels.push_back("newsblur:read"); } item.pubDate_ts = parse_date(pub_date); char rfc822_date[128]; strftime(rfc822_date, sizeof(rfc822_date), "%a, %d %b %Y %H:%M:%S 
%z", gmtime(&item.pubDate_ts)); item.pubDate = rfc822_date; f.items.push_back(item); } } std::sort(f.items.begin(), f.items.end(), [](const rsspp::item& a, const rsspp::item& b) { return a.pubDate_ts > b.pubDate_ts; }); return f; }
rsspp::feed ttrss_api::fetch_feed(const std::string& id) { rsspp::feed f; f.rss_version = rsspp::TTRSS_JSON; std::map<std::string, std::string> args; args["feed_id"] = id; args["show_content"] = "1"; struct json_object * content = run_op("getHeadlines", args); if (!content) return f; if (json_object_get_type(content) != json_type_array) { LOG(LOG_ERROR, "ttrss_api::fetch_feed: content is not an array"); return f; } struct array_list * items = json_object_get_array(content); int items_size = array_list_length(items); LOG(LOG_DEBUG, "ttrss_api::fetch_feed: %d items", items_size); for (int i=0;i<items_size;i++) { struct json_object * item_obj = (struct json_object *)array_list_get_idx(items, i); int id = json_object_get_int(json_object_object_get(item_obj, "id")); const char * title = json_object_get_string(json_object_object_get(item_obj, "title")); const char * link = json_object_get_string(json_object_object_get(item_obj, "link")); const char * content = json_object_get_string(json_object_object_get(item_obj, "content")); time_t updated = (time_t)json_object_get_int(json_object_object_get(item_obj, "updated")); bool unread = json_object_get_boolean(json_object_object_get(item_obj, "unread")); rsspp::item item; if (title) item.title = title; if (link) item.link = link; if (content) item.content_encoded = content; item.guid = utils::strprintf("%d", id); if (unread) { item.labels.push_back("ttrss:unread"); } else { item.labels.push_back("ttrss:read"); } char rfc822_date[128]; strftime(rfc822_date, sizeof(rfc822_date), "%a, %d %b %Y %H:%M:%S %z", gmtime(&updated)); item.pubDate = rfc822_date; item.pubDate_ts = updated; f.items.push_back(item); } std::sort(f.items.begin(), f.items.end(), sort_by_pubdate); json_object_put(content); return f; }