/** * gfbgraph_connectable_default_parse_connected_data: * @self: a #GFBGraphConnectable. * @payload: a const #gchar with the response string from the Facebook Graph API. * @error: (allow-none): a #GError or %NULL. * * In most cases, #GFBGraphConnectable implementers can use this function in order to parse * the response when a gfbgraph_node_get_connection_nodes() is executed and the * gfbgraph_connectable_parse_connected_data() was called. * * Normally, Facebook Graph API returns the connections in the same way, using JSON objects, * with a root object called "data". * * Returns: (element-type GFBGraphNode) (transfer full): a newly-allocated #GList of #GFBGraphNode with the same #GType as @self. **/ GList* gfbgraph_connectable_default_parse_connected_data (GFBGraphConnectable *self, const gchar *payload, GError **error) { GList *nodes_list = NULL; JsonParser *jparser; GType node_type; node_type = G_OBJECT_TYPE (self); jparser = json_parser_new (); if (json_parser_load_from_data (jparser, payload, -1, error)) { JsonNode *root_jnode; JsonObject *main_jobject; JsonArray *nodes_jarray; int i = 0; root_jnode = json_parser_get_root (jparser); main_jobject = json_node_get_object (root_jnode); nodes_jarray = json_object_get_array_member (main_jobject, "data"); for (i = 0; i < json_array_get_length (nodes_jarray); i++) { JsonNode *jnode; GFBGraphNode *node; jnode = json_array_get_element (nodes_jarray, i); node = GFBGRAPH_NODE (json_gobject_deserialize (node_type, jnode)); nodes_list = g_list_append (nodes_list, node); } } g_clear_object (&jparser); return nodes_list; }
static gboolean parse_reviews (GsPlugin *plugin, JsonParser *parser, GsApp *app, GCancellable *cancellable, GError **error) { GsAuth *auth; JsonArray *array; const gchar *consumer_key = NULL; guint i; auth = gs_plugin_get_auth_by_id (plugin, "ubuntuone"); if (auth != NULL) consumer_key = gs_auth_get_metadata_item (auth, "consumer-key"); if (!JSON_NODE_HOLDS_ARRAY (json_parser_get_root (parser))) return FALSE; array = json_node_get_array (json_parser_get_root (parser)); for (i = 0; i < json_array_get_length (array); i++) { g_autoptr(AsReview) review = NULL; /* Read in from JSON... (skip bad entries) */ review = as_review_new (); if (parse_review (review, consumer_key, json_array_get_element (array, i))) gs_app_add_review (app, review); } return TRUE; }
/* Fetch element @index from @json and parse it as an integer.
 * On a missing element, *@success (if non-NULL) is set to FALSE and 0 is
 * returned; otherwise parsing is delegated to
 * json_node_get_parse_int_member(). */
gint
json_array_get_parse_int_element(JsonArray *json, guint index, gboolean *success)
{
    JsonNode *element = json_array_get_element(json, index);

    if (element != NULL)
        return json_node_get_parse_int_member(element, success);

    /* Missing element: report failure (when requested) and return a
     * neutral value. */
    if (success != NULL)
        *success = FALSE;
    return 0;
}
/* Parse a JSON array node into a GVariant tuple/struct.  @child_type is the
 * first element type of the tuple and is advanced with g_variant_type_next()
 * for each JSON element consumed.  Returns a floating GVariant on success or
 * NULL with @error set. */
static GVariant *
parse_json_tuple (JsonNode *node,
                  const GVariantType *child_type,
                  GError **error)
{
  GVariant *result = NULL;
  GPtrArray *children;
  GVariant *value;
  JsonArray *array;
  guint length;
  guint i;

  children = g_ptr_array_new ();

  if (!check_type (node, JSON_NODE_ARRAY, 0, error))
    goto out;

  array = json_node_get_array (node);
  length = json_array_get_length (array);

  for (i = 0; i < length; i++)
    {
      value = NULL;
      if (child_type == NULL)
        {
          /* Type list exhausted but JSON elements remain. */
          g_set_error (error, G_IO_ERROR, G_IO_ERROR_INVALID_DATA,
                       "Too many values in tuple/struct");
        }
      else
        {
          value = parse_json (json_array_get_element (array, i),
                              child_type, error);
        }

      if (!value)
        goto out;

      g_ptr_array_add (children, value);
      child_type = g_variant_type_next (child_type);
    }

  if (child_type)
    {
      /* Type list not exhausted: JSON array was too short. */
      g_set_error (error, G_IO_ERROR, G_IO_ERROR_INVALID_DATA,
                   "Too few values in tuple/struct");
      goto out;
    }

  result = g_variant_new_tuple ((GVariant *const *)children->pdata,
                                children->len);

  /* g_variant_new_tuple() sank the floating refs of the children; zero the
   * length so the cleanup loop below does not unref them a second time. */
  children->len = 0;

out:
  /* On error paths this releases any children parsed so far. */
  g_ptr_array_foreach (children, (GFunc)g_variant_unref, NULL);
  g_ptr_array_free (children, TRUE);
  return result;
}
/*!
 * Recursive function that handles converting \c JsonNode's to \c
 * GNode's.
 *
 * \param root \c Root JsonNode to convert.
 * \param node \c GNode.
 * \param parsing_array \c true if handling an array, else \c false.
 */
static void
clr_oci_json_parse_aux(JsonNode* root, GNode* node, bool parsing_array)
{
	guint i;

	g_assert (root);
	g_assert (node);

	if (JSON_NODE_TYPE(root) == JSON_NODE_OBJECT) {
		JsonObject *object = json_node_get_object(root);
		if (object) {
			guint j;
			guint size;
			GList* keys, *key = NULL;
			GList* values, *value = NULL;

			size = json_object_get_size(object);
			keys = json_object_get_members(object);
			values = json_object_get_values(object);

			/* Anchor child: subsequent keys are appended as
			 * siblings via node->parent below. */
			node = g_node_append(node, g_node_new(NULL));

			/* Walk keys and values in lock-step; each key becomes
			 * a sibling GNode, each value is recursed under it. */
			for (j = 0, key = keys, value = values; j < size; j++) {
				if (key) {
					/* Key string is copied; GNode data is
					 * owned by the tree after this. */
					node = g_node_append(node->parent,
					                     g_node_new(g_strdup(key->data)));
				}
				if (value) {
					clr_oci_json_parse_aux(value->data, node, false);
				}
				key = g_list_next(key);
				value = g_list_next(value);
			}

			/* Lists own only the links, not the key/value data. */
			if (keys) {
				g_list_free(keys);
			}
			if (values) {
				g_list_free(values);
			}
		}
	} else if (JSON_NODE_TYPE(root) == JSON_NODE_ARRAY) {
		JsonArray* array = json_node_get_array(root);
		guint array_size = json_array_get_length (array);
		JsonNode *array_element;

		/* Array elements all recurse under the same parent node. */
		for (i = 0; i < array_size; i++) {
			array_element = json_array_get_element(array, i);
			clr_oci_json_parse_aux(array_element, node, true);
		}
	} else if (JSON_NODE_TYPE(root) == JSON_NODE_VALUE) {
		/* Scalar leaf: stringified value becomes a child node. */
		node = g_node_append(node, g_node_new(clr_oci_json_string(root)));
		if (parsing_array) {
			/* NOTE(review): extra NULL child appears to mark array
			 * membership for later consumers — confirm with the
			 * tree-walking code. */
			node = g_node_append(node, g_node_new(NULL));
		}
	}
}
/* This function is synchronous! Blocking once at startup seems pretty
 * reasonable and allows us to avoid any complexity re. races.
 *
 * Loads the persisted queue (a JSON array of serialized MexProgram objects)
 * from the queue file, if present, and adds each entry to @model. */
static void
mex_queue_model_load (MexQueueModel *model)
{
  JsonParser *parser;
  gchar *filename;
  GError *error = NULL;
  JsonNode *root;
  JsonArray *array;
  guint i; /* guint: matches json_array_get_length() return type */

  filename = _queue_file_name ();

  /* No queue file yet is a normal first-run condition, not an error. */
  if (!g_file_test (filename, G_FILE_TEST_EXISTS))
    {
      g_free (filename);
      return;
    }

  parser = json_parser_new ();
  if (!json_parser_load_from_file (parser, filename, &error))
    {
      g_warning (G_STRLOC ": error populating from file: %s",
                 error->message);
      g_clear_error (&error);
      goto out;
    }

  root = json_parser_get_root (parser);

  if (!JSON_NODE_HOLDS_ARRAY (root))
    {
      g_warning (G_STRLOC ": JSON data not of expected format!");
      goto out;
    }

  array = json_node_get_array (root);

  for (i = 0; i < json_array_get_length (array); i++)
    {
      MexContent *content;
      JsonNode *node;

      node = json_array_get_element (array, i);
      content = (MexContent *)json_gobject_deserialize (MEX_TYPE_PROGRAM, node);

      /* json_gobject_deserialize() returns NULL for malformed entries;
       * skip those rather than handing NULL to the model. */
      if (content == NULL)
        {
          g_warning (G_STRLOC ": could not deserialize queue entry %u", i);
          continue;
        }

      mex_model_add_content (MEX_MODEL (model), content);
    }

out:
  g_free (filename);
  g_object_unref (parser);
}
/* Import play list from a json object.
 *
 * @pl: the play list to append songs to.
 * @js_plist: a JSON array of song objects; each must at least carry a
 *            "name" member (URI or file path) to be imported.
 *
 * Entries that are missing, not objects, or nameless are skipped. */
void
plist_import_from_json( plist_t *pl, JsonArray *js_plist )
{
	int num_songs = json_array_get_length(js_plist);
	for ( int i = 0; i < num_songs; ++i )
	{
		JsonNode *js_song_node = json_array_get_element(js_plist, i);
		if (!js_song_node)
			continue;
		if (!JSON_NODE_HOLDS_OBJECT(js_song_node))
			continue;
		JsonObject *js_song = json_node_get_object(js_song_node);

		/* "name" is mandatory: it is the song's URI or path. */
		const char *name = js_get_string(js_song, "name", NULL);
		if (!name)
			continue;

		/* Optional metadata; -1 marks "unset" for the time range. */
		song_metadata_t metadata = SONG_METADATA_EMPTY;
		const char *title = js_get_string(js_song, "title", NULL);
		if (title)
			metadata.m_title = title;
		metadata.m_len = js_get_int(js_song, "length", 0);
		metadata.m_start_time = js_get_int(js_song, "start_time", -1);
		metadata.m_end_time = js_get_int(js_song, "end_time", -1);

		/* Optional embedded song info (tags). */
		song_info_t *si = NULL;
		JsonObject *js_si = js_get_obj(js_song, "song_info");
		if (js_si)
		{
			si = si_new();
			si_set_artist  (si, js_get_string(js_si, "artist", ""));
			si_set_name    (si, js_get_string(js_si, "name", ""));
			si_set_album   (si, js_get_string(js_si, "album", ""));
			si_set_year    (si, js_get_string(js_si, "year", ""));
			si_set_genre   (si, js_get_string(js_si, "genre", ""));
			si_set_comments(si, js_get_string(js_si, "comments", ""));
			si_set_track   (si, js_get_string(js_si, "track", ""));
		}

		/* static_info routes ownership of @si through the metadata;
		 * otherwise it is attached after construction below.
		 * NOTE(review): if song creation fails while si != NULL and
		 * !is_static_info, si appears to leak — confirm whether
		 * si has a destructor that should be called here. */
		bool_t is_static_info = js_get_int(js_song, "static_info", 0);
		if (is_static_info)
			metadata.m_song_info = si;

		/* URI-prefixed names are remote/stream songs; bare names are
		 * local files. */
		song_t *s = fu_is_prefixed(name)
			? song_new_from_uri(name, &metadata)
			: song_new_from_file(name, &metadata);
		if (s)
		{
			if (!is_static_info && si)
				song_set_info(s, si);
			/* -1: append at the end of the play list. */
			plist_add_song(pl, s, -1);
		}
	}
}
/**
 * json_array_dup_element:
 * @array: a #JsonArray
 * @index_: the index of the element to retrieve
 *
 * Retrieves a copy of the #JsonNode containing the value of the
 * element at @index_ inside a #JsonArray
 *
 * Return value: (transfer full): a copy of the #JsonNode at the requested
 *   index. Use json_node_free() when done.
 *
 * Since: 0.6
 */
JsonNode *
json_array_dup_element (JsonArray *array,
                        guint      index_)
{
  JsonNode *node;

  g_return_val_if_fail (array != NULL, NULL);
  g_return_val_if_fail (index_ < array->elements->len, NULL);

  node = json_array_get_element (array, index_);

  /* A NULL slot cannot be copied; hand it back as-is. */
  return node != NULL ? json_node_copy (node) : NULL;
}
gboolean _rpmostree_jsonutil_array_require_int_element (JsonArray *array, guint i, gint64 *out_val, GError **error) { JsonNode *node = json_array_get_element (array, i); if (!_jsonutil_node_check_int (node)) { g_set_error (error, G_IO_ERROR, G_IO_ERROR_FAILED, "Element at index %u is not an integer", i); *out_val = 0; return FALSE; } *out_val = json_array_get_int_element (array, i); return TRUE; }
/* Parse a JSON array node into a GVariant array whose elements all have
 * @child_type.  Returns a floating GVariant on success or NULL with @error
 * set. */
static GVariant *
parse_json_array (JsonNode *node,
                  const GVariantType *child_type,
                  GError **error)
{
  GVariant *result = NULL;
  GPtrArray *children;
  GVariant *child;
  JsonArray *array;
  guint length;
  guint i;

  children = g_ptr_array_new ();

  if (!check_type (node, JSON_NODE_ARRAY, 0, error))
    goto out;

  array = json_node_get_array (node);
  length = json_array_get_length (array);

  for (i = 0; i < length; i++)
    {
      child = parse_json (json_array_get_element (array, i),
                          child_type, error);
      if (!child)
        goto out;

      g_ptr_array_add (children, child);
    }

  result = g_variant_new_array (child_type,
                                (GVariant *const *)children->pdata,
                                children->len);

  /* g_variant_new_array() sank the floating refs of the children; zero the
   * length so the cleanup loop below does not unref them a second time. */
  children->len = 0;

out:
  /* On error paths this releases any children parsed so far. */
  g_ptr_array_foreach (children, (GFunc)g_variant_unref, NULL);
  g_ptr_array_free (children, TRUE);
  return result;
}
static gboolean parse_review_entries (GsPlugin *plugin, JsonParser *parser, GError **error) { JsonArray *array; guint i; if (!JSON_NODE_HOLDS_ARRAY (json_parser_get_root (parser))) return FALSE; array = json_node_get_array (json_parser_get_root (parser)); for (i = 0; i < json_array_get_length (array); i++) { const gchar *package_name; Histogram histogram; /* Read in from JSON... (skip bad entries) */ if (!parse_review_entry (json_array_get_element (array, i), &package_name, &histogram)) continue; /* ...write into the database (abort everything if can't write) */ if (!set_package_stats (plugin, package_name, &histogram, error)) return FALSE; } return TRUE; }
/* Channel "prepare" vfunc: validate the channel options ("instances",
 * "omit-instances", "metrics", "interval"), allocate the metric table, and
 * either fail/close the channel on bad options or start the metronome and
 * mark the channel ready. */
static void
cockpit_internal_metrics_prepare (CockpitChannel *channel)
{
  CockpitInternalMetrics *self = COCKPIT_INTERNAL_METRICS (channel);
  JsonObject *options;
  JsonArray *metrics;
  int i;

  /* Chain up first so base-class option handling runs. */
  COCKPIT_CHANNEL_CLASS (cockpit_internal_metrics_parent_class)->prepare (channel);

  options = cockpit_channel_get_options (channel);

  /* "instances" option */
  if (!cockpit_json_get_strv (options, "instances", NULL,
                              (gchar ***)&self->instances))
    {
      cockpit_channel_fail (channel, "protocol-error",
                            "invalid \"instances\" option (not an array of strings)");
      return;
    }

  /* "omit-instances" option */
  if (!cockpit_json_get_strv (options, "omit-instances", NULL,
                              (gchar ***)&self->omit_instances))
    {
      cockpit_channel_fail (channel, "protocol-error",
                            "invalid \"omit-instances\" option (not an array of strings)");
      return;
    }

  /* "metrics" option */
  self->n_metrics = 0;
  if (!cockpit_json_get_array (options, "metrics", NULL, &metrics))
    {
      cockpit_channel_fail (channel, "protocol-error",
                            "invalid \"metrics\" option was specified (not an array)");
      return;
    }
  if (metrics)
    self->n_metrics = json_array_get_length (metrics);

  self->metrics = g_new0 (MetricInfo, self->n_metrics);
  for (i = 0; i < self->n_metrics; i++)
    {
      MetricInfo *info = &self->metrics[i];
      /* convert_metric_description() fails the channel itself on bad
       * descriptions, so a plain return is enough here. */
      if (!convert_metric_description (self, json_array_get_element (metrics, i),
                                       info, i))
        return;
      /* A well-formed but unknown metric closes the channel gracefully
       * rather than failing it with a protocol error. */
      if (!info->desc)
        {
          cockpit_channel_close (channel, "not-supported");
          return;
        }
    }

  /* "interval" option: defaults to 1000ms, must be a positive int. */
  if (!cockpit_json_get_int (options, "interval", 1000, &self->interval))
    {
      cockpit_channel_fail (channel, "protocol-error",
                            "invalid \"interval\" option");
      return;
    }
  else if (self->interval <= 0 || self->interval > G_MAXINT)
    {
      cockpit_channel_fail (channel, "protocol-error",
                            "invalid \"interval\" value: %" G_GINT64_FORMAT,
                            self->interval);
      return;
    }

  /* First sample must emit metadata before data. */
  self->need_meta = TRUE;

  cockpit_metrics_metronome (COCKPIT_METRICS (self), self->interval);
  cockpit_channel_ready (channel, NULL);
}
/* GTask worker: load and parse a compile_commands.json file (@task_data is
 * the GFile), building self->info_by_file (GFile -> CompileInfo) and
 * self->vala_info (CompileInfo for .vala sources, which cannot be matched by
 * filename later).  Returns TRUE via the task on success, or an error for
 * I/O / JSON / structural failures. */
static void
ide_compile_commands_load_worker (GTask        *task,
                                  gpointer      source_object,
                                  gpointer      task_data,
                                  GCancellable *cancellable)
{
  IdeCompileCommands *self = source_object;
  GFile *gfile = task_data;
  g_autoptr(JsonParser) parser = NULL;
  g_autoptr(GError) error = NULL;
  g_autoptr(GHashTable) info_by_file = NULL;
  g_autoptr(GHashTable) directories_by_path = NULL;
  g_autoptr(GPtrArray) vala_info = NULL;
  g_autofree gchar *contents = NULL;
  JsonNode *root;
  JsonArray *ar;
  gsize len = 0;
  guint n_items;

  IDE_ENTRY;

  g_assert (G_IS_TASK (task));
  g_assert (IDE_IS_COMPILE_COMMANDS (self));
  g_assert (G_IS_FILE (gfile));
  g_assert (!cancellable || G_IS_CANCELLABLE (cancellable));

  parser = json_parser_new ();

  /* Read the whole file then parse it; either failure is returned as-is. */
  if (!g_file_load_contents (gfile, cancellable, &contents, &len, NULL, &error) ||
      !json_parser_load_from_data (parser, contents, len, &error))
    {
      g_task_return_error (task, g_steal_pointer (&error));
      IDE_EXIT;
    }

  /* compile_commands.json must be a top-level JSON array. */
  if (NULL == (root = json_parser_get_root (parser)) ||
      !JSON_NODE_HOLDS_ARRAY (root) ||
      NULL == (ar = json_node_get_array (root)))
    {
      g_task_return_new_error (task,
                               G_IO_ERROR,
                               G_IO_ERROR_INVALID_DATA,
                               "Failed to extract commands, invalid json");
      IDE_EXIT;
    }

  /* info_by_file keys are the CompileInfo's own GFile, so only the value
   * destructor frees; directories_by_path keys are the CompileInfo's
   * directory string, owned elsewhere. */
  info_by_file = g_hash_table_new_full (g_file_hash,
                                        (GEqualFunc)g_file_equal,
                                        NULL,
                                        compile_info_free);

  directories_by_path = g_hash_table_new_full (g_str_hash,
                                               g_str_equal,
                                               NULL,
                                               g_object_unref);

  vala_info = g_ptr_array_new_with_free_func (compile_info_free);

  n_items = json_array_get_length (ar);

  for (guint i = 0; i < n_items; i++)
    {
      CompileInfo *info;
      JsonNode *item;
      JsonNode *value;
      JsonObject *obj;
      GFile *dir;
      const gchar *directory = NULL;
      const gchar *file = NULL;
      const gchar *command = NULL;

      item = json_array_get_element (ar, i);

      /* Skip past this node if its invalid for some reason, so we
       * can try to be tolerant of errors created by broken tooling.
       */
      if (item == NULL ||
          !JSON_NODE_HOLDS_OBJECT (item) ||
          NULL == (obj = json_node_get_object (item)))
        continue;

      if (json_object_has_member (obj, "file") &&
          NULL != (value = json_object_get_member (obj, "file")) &&
          JSON_NODE_HOLDS_VALUE (value))
        file = json_node_get_string (value);

      if (json_object_has_member (obj, "directory") &&
          NULL != (value = json_object_get_member (obj, "directory")) &&
          JSON_NODE_HOLDS_VALUE (value))
        directory = json_node_get_string (value);

      if (json_object_has_member (obj, "command") &&
          NULL != (value = json_object_get_member (obj, "command")) &&
          JSON_NODE_HOLDS_VALUE (value))
        command = json_node_get_string (value);

      /* Ignore items that are missing something or other */
      if (file == NULL || command == NULL || directory == NULL)
        continue;

      /* Try to reduce the number of GFile we have for directories */
      if (NULL == (dir = g_hash_table_lookup (directories_by_path, directory)))
        {
          dir = g_file_new_for_path (directory);
          g_hash_table_insert (directories_by_path, (gchar *)directory, dir);
        }

      info = g_slice_new (CompileInfo);
      info->file = g_file_resolve_relative_path (dir, file);
      info->directory = g_object_ref (dir);
      info->command = g_strdup (command);

      /* g_hash_table_replace(): last entry for a file wins. */
      g_hash_table_replace (info_by_file, info->file, info);

      /*
       * We might need to keep a special copy of this for resolving .vala
       * builds which won't be able to be matched based on the filename. We
       * keep all of them around right now in case we want to later on find
       * the closest match based on directory.
       */
      if (g_str_has_suffix (file, ".vala"))
        {
          info = g_slice_new (CompileInfo);
          info->file = g_file_resolve_relative_path (dir, file);
          info->directory = g_object_ref (dir);
          info->command = g_strdup (command);
          g_ptr_array_add (vala_info, info);
        }
    }

  /* Transfer ownership to the instance; the autoptrs become NULL. */
  self->info_by_file = g_steal_pointer (&info_by_file);
  self->vala_info = g_steal_pointer (&vala_info);

  g_task_return_boolean (task, TRUE);

  IDE_EXIT;
}
/* Look up @path via queryNode() and return the first element of the
 * resulting JSON array, or NULL when the path does not resolve, does not
 * hold an array, or the array is empty.
 *
 * Previously the intermediate results were used unchecked, so a missing
 * path or non-array node crashed (NULL dereference / json-glib assertion). */
JsonNode* Settings::getNode(const char* path)
{
    JsonNode* node = queryNode(path);
    if (node == NULL || !JSON_NODE_HOLDS_ARRAY(node))
        return NULL;

    JsonArray* array = json_node_get_array(node);
    if (array == NULL || json_array_get_length(array) == 0)
        return NULL;

    return json_array_get_element(array, 0);
}
/* Parse the manifest JSON held by @parser: validate "manifest_version",
 * find the entry in "versions" matching @requested_version, and load every
 * file it lists (paths are relative to @dir).
 *
 * Returns TRUE when the requested version was found and its files loaded;
 * FALSE on any structural error or when the version is absent. */
static gboolean
parse_json (GssAdaptive * adaptive, JsonParser * parser, const char *dir,
    const char *requested_version)
{
  JsonNode *node;
  JsonObject *obj;
  JsonNode *n;
  JsonArray *version_array;
  int version;
  int len;
  int i;

  g_return_val_if_fail (adaptive != NULL, FALSE);
  g_return_val_if_fail (parser != NULL, FALSE);
  g_return_val_if_fail (dir != NULL, FALSE);

  node = json_parser_get_root (parser);
  obj = json_node_get_object (node);

  /* Only manifest_version 0 is understood. */
  n = json_object_get_member (obj, "manifest_version");
  version = json_node_get_int (n);
  if (version != 0) {
    GST_ERROR ("bad version %d", version);
    return FALSE;
  }

  n = json_object_get_member (obj, "versions");
  version_array = json_node_get_array (n);
  len = json_array_get_length (version_array);
  for (i = 0; i < len; i++) {
    JsonArray *files_array;
    int files_len;
    const char *version_string;
    int j;

    /* Every missing/invalid member aborts the whole parse. */
    n = json_array_get_element (version_array, i);
    if (n == NULL)
      return FALSE;
    obj = json_node_get_object (n);
    if (obj == NULL)
      return FALSE;

    n = json_object_get_member (obj, "version");
    if (n == NULL)
      return FALSE;
    version_string = json_node_get_string (n);
    if (version_string == NULL)
      return FALSE;

    /* Skip entries for other versions. */
    if (strcmp (version_string, requested_version) != 0)
      continue;

    n = json_object_get_member (obj, "files");
    if (n == NULL)
      return FALSE;
    files_array = json_node_get_array (n);
    if (files_array == NULL)
      return FALSE;

    files_len = json_array_get_length (files_array);
    if (files_len == 0)
      return FALSE;

    for (j = 0; j < files_len; j++) {
      const char *filename;
      char *full_fn;

      n = json_array_get_element (files_array, j);
      if (n == NULL)
        return FALSE;

      /* Entries may be plain strings or objects with a "filename"
       * member; in the latter case @n is re-pointed at that member. */
      if (json_node_get_node_type (n) == JSON_NODE_OBJECT) {
        obj = json_node_get_object (n);
        if (obj) {
          n = json_object_get_member (obj, "filename");
          if (n == NULL)
            return FALSE;
        }
      }

      filename = json_node_get_string (n);
      if (filename == NULL)
        return FALSE;

      full_fn = g_strdup_printf ("%s/%s", dir, filename);
      load_file (adaptive, full_fn);
      g_free (full_fn);
    }

    /* Requested version handled: done. */
    return TRUE;
  }

  GST_ERROR ("requested version not found: %s", requested_version);
  return FALSE;
}
/* Deserialize @node into @value according to the GValue's type.
 *
 * Boxed types with a registered deserializer are handled first; then the
 * node kind is dispatched: objects recurse through json_gobject_new(),
 * arrays are only supported for G_TYPE_STRV, scalar values are converted
 * from the JSON fundamental types (int64/double/boolean/string) into the
 * target fundamental type, and JSON null maps to NULL strings/objects.
 *
 * Returns TRUE if @value was set, FALSE otherwise.  @pspec is unused here. */
gboolean
json_deserialize_pspec (GValue *value, GParamSpec *pspec, JsonNode *node)
{
  GValue node_value = { 0, };
  gboolean retval = FALSE;

  /* Custom boxed deserializers take precedence over everything else. */
  if (G_TYPE_FUNDAMENTAL (G_VALUE_TYPE (value)) == G_TYPE_BOXED)
    {
      JsonNodeType node_type = json_node_get_node_type (node);
      GType boxed_type = G_VALUE_TYPE (value);

      if (json_boxed_can_deserialize (boxed_type, node_type))
        {
          gpointer boxed = json_boxed_deserialize (boxed_type, node);
          g_value_take_boxed (value, boxed);
          return TRUE;
        }
    }

  switch (JSON_NODE_TYPE (node))
    {
    case JSON_NODE_OBJECT:
      /* Nested objects become GObjects of the target type. */
      if (g_type_is_a (G_VALUE_TYPE (value), G_TYPE_OBJECT))
        {
          GObject *object;

          object = json_gobject_new (G_VALUE_TYPE (value),
                                     json_node_get_object (node));
          if (object != NULL)
            g_value_take_object (value, object);
          else
            g_value_set_object (value, NULL);

          retval = TRUE;
        }
      break;

    case JSON_NODE_ARRAY:
      /* Arrays are only deserialized into string vectors. */
      if (G_VALUE_HOLDS (value, G_TYPE_STRV))
        {
          JsonArray *array = json_node_get_array (node);
          guint i, array_len = json_array_get_length (array);
          GPtrArray *str_array = g_ptr_array_sized_new (array_len + 1);

          for (i = 0; i < array_len; i++)
            {
              JsonNode *val = json_array_get_element (array, i);

              /* Non-scalar and null-string elements are skipped. */
              if (JSON_NODE_TYPE (val) != JSON_NODE_VALUE)
                continue;

              if (json_node_get_string (val) != NULL)
                g_ptr_array_add (str_array,
                                 (gpointer) json_node_get_string (val));
            }

          /* NULL-terminate; g_value_set_boxed() copies the strv, so the
           * GPtrArray (and its borrowed strings) can be freed after. */
          g_ptr_array_add (str_array, NULL);

          g_value_set_boxed (value, str_array->pdata);

          g_ptr_array_free (str_array, TRUE);

          retval = TRUE;
        }
      break;

    case JSON_NODE_VALUE:
      json_node_get_value (node, &node_value);
#if 0
      {
        gchar *node_str = g_strdup_value_contents (&node_value);
        g_debug ("%s: value type '%s' := node value type '%s' -> '%s'",
                 G_STRLOC,
                 g_type_name (G_VALUE_TYPE (value)),
                 g_type_name (G_VALUE_TYPE (&node_value)),
                 node_str);
        g_free (node_str);
      }
#endif

      /* JSON scalars arrive as int64/double/boolean/string; everything
       * else is a narrowing or widening conversion from those. */
      switch (G_TYPE_FUNDAMENTAL (G_VALUE_TYPE (value)))
        {
        /* Direct copies: node value already holds the target type. */
        case G_TYPE_BOOLEAN:
        case G_TYPE_INT64:
        case G_TYPE_STRING:
          if (G_VALUE_HOLDS (&node_value, G_VALUE_TYPE (value)))
            {
              g_value_copy (&node_value, value);
              retval = TRUE;
            }
          break;

        case G_TYPE_INT:
          if (G_VALUE_HOLDS (&node_value, G_TYPE_INT64))
            {
              g_value_set_int (value, (gint) g_value_get_int64 (&node_value));
              retval = TRUE;
            }
          break;

        case G_TYPE_CHAR:
          if (G_VALUE_HOLDS (&node_value, G_TYPE_INT64))
            {
              g_value_set_schar (value, (gchar) g_value_get_int64 (&node_value));
              retval = TRUE;
            }
          break;

        case G_TYPE_UINT:
          if (G_VALUE_HOLDS (&node_value, G_TYPE_INT64))
            {
              g_value_set_uint (value, (guint) g_value_get_int64 (&node_value));
              retval = TRUE;
            }
          break;

        case G_TYPE_UCHAR:
          if (G_VALUE_HOLDS (&node_value, G_TYPE_INT64))
            {
              g_value_set_uchar (value, (guchar) g_value_get_int64 (&node_value));
              retval = TRUE;
            }
          break;

        case G_TYPE_LONG:
          if (G_VALUE_HOLDS (&node_value, G_TYPE_INT64))
            {
              g_value_set_long (value, (glong) g_value_get_int64 (&node_value));
              retval = TRUE;
            }
          break;

        case G_TYPE_ULONG:
          if (G_VALUE_HOLDS (&node_value, G_TYPE_INT64))
            {
              g_value_set_ulong (value, (gulong) g_value_get_int64 (&node_value));
              retval = TRUE;
            }
          break;

        case G_TYPE_UINT64:
          if (G_VALUE_HOLDS (&node_value, G_TYPE_INT64))
            {
              g_value_set_uint64 (value, (guint64) g_value_get_int64 (&node_value));
              retval = TRUE;
            }
          break;

        /* Floating targets accept either a JSON double or a JSON int. */
        case G_TYPE_DOUBLE:
          if (G_VALUE_HOLDS (&node_value, G_TYPE_DOUBLE))
            {
              g_value_set_double (value, g_value_get_double (&node_value));
              retval = TRUE;
            }
          else if (G_VALUE_HOLDS (&node_value, G_TYPE_INT64))
            {
              g_value_set_double (value, (gdouble) g_value_get_int64 (&node_value));
              retval = TRUE;
            }
          break;

        case G_TYPE_FLOAT:
          if (G_VALUE_HOLDS (&node_value, G_TYPE_DOUBLE))
            {
              g_value_set_float (value, (gfloat) g_value_get_double (&node_value));
              retval = TRUE;
            }
          else if (G_VALUE_HOLDS (&node_value, G_TYPE_INT64))
            {
              g_value_set_float (value, (gfloat) g_value_get_int64 (&node_value));
              retval = TRUE;
            }
          break;

        /* Enums/flags accept either the numeric value or a nickname. */
        case G_TYPE_ENUM:
          {
            gint enum_value = 0;

            if (G_VALUE_HOLDS (&node_value, G_TYPE_INT64))
              {
                enum_value = g_value_get_int64 (&node_value);
                retval = TRUE;
              }
            else if (G_VALUE_HOLDS (&node_value, G_TYPE_STRING))
              {
                retval = enum_from_string (G_VALUE_TYPE (value),
                                           g_value_get_string (&node_value),
                                           &enum_value);
              }

            if (retval)
              g_value_set_enum (value, enum_value);
          }
          break;

        case G_TYPE_FLAGS:
          {
            gint flags_value = 0;

            if (G_VALUE_HOLDS (&node_value, G_TYPE_INT64))
              {
                flags_value = g_value_get_int64 (&node_value);
                retval = TRUE;
              }
            else if (G_VALUE_HOLDS (&node_value, G_TYPE_STRING))
              {
                retval = flags_from_string (G_VALUE_TYPE (value),
                                            g_value_get_string (&node_value),
                                            &flags_value);
              }

            if (retval)
              g_value_set_flags (value, flags_value);
          }
          break;

        default:
          retval = FALSE;
          break;
        }

      g_value_unset (&node_value);
      break;

    case JSON_NODE_NULL:
      /* JSON null maps onto NULL for strings and objects only. */
      if (G_TYPE_FUNDAMENTAL (G_VALUE_TYPE (value)) == G_TYPE_STRING)
        {
          g_value_set_string (value, NULL);
          retval = TRUE;
        }
      else if (G_TYPE_FUNDAMENTAL (G_VALUE_TYPE (value)) == G_TYPE_OBJECT)
        {
          g_value_set_object (value, NULL);
          retval = TRUE;
        }
      else
        retval = FALSE;
      break;
    }

  return retval;
}
/**
 * melo_jsonrpc_parse_request:
 * @request: the JSON-RPC request serialized in a string
 * @length: the length of @request, can be -1 for null-terminated string
 * @error: a pointer to a #GError which is set if an error occurred
 *
 * Parse a string @request containing a JSON-RPC serialized request, call the
 * registered callback which match the request method and present the result
 * as a JSON-RPC response serialized in a string.
 * If the method is not registered, a JSON-RPC response is generated with the
 * error MELO_JSONRPC_ERROR_METHOD_NOT_FOUND.
 *
 * Returns: (transfer full): a string containing the serialized #JsonNode
 * corresponding to the respond to the JSON-RPC request. Use g_free() after
 * usage.
 */
gchar *
melo_jsonrpc_parse_request (const gchar *request, gsize length, GError **error)
{
  JsonParser *parser;
  JsonNodeType type;
  JsonNode *req;
  JsonNode *res;
  GError *err = NULL;
  gchar *str;

  /* Create parser */
  parser = json_parser_new ();
  if (!parser)
    return melo_jsonrpc_build_error_str (MELO_JSONRPC_ERROR_INTERNAL_ERROR,
                                         "Internal error");

  /* Parse request; any parse failure (or missing root) maps to a
   * JSON-RPC "Parse error" response. */
  if (!json_parser_load_from_data (parser, request, length, &err) ||
      (req = json_parser_get_root (parser)) == NULL) {
    g_clear_error (&err);
    goto parse_error;
  }

  /* Get node type */
  type = json_node_get_node_type (req);

  /* Parse node: an object is a single request, an array is a batch,
   * anything else is an invalid request. */
  if (type == JSON_NODE_OBJECT) {
    /* Parse single request */
    res = melo_jsonrpc_parse_node (req);
  } else if (type == JSON_NODE_ARRAY) {
    /* Parse multiple requests: batch */
    JsonArray *req_array;
    JsonArray *res_array;
    JsonNode *node;
    guint count, i;

    /* Get array from node; an empty batch is invalid per JSON-RPC 2.0. */
    req_array = json_node_get_array (req);
    count = json_array_get_length (req_array);
    if (!count)
      goto invalid;

    /* Create a new array for response */
    res_array = json_array_sized_new (count);
    res = json_node_new (JSON_NODE_ARRAY);
    json_node_take_array (res, res_array);

    /* Parse each elements of array */
    for (i = 0; i < count; i++) {
      /* Get element */
      node = json_array_get_element (req_array, i);
      /* Process request */
      node = melo_jsonrpc_parse_node (node);
      /* Add new response to array */
      if (node)
        json_array_add_element (res_array, node);
    }

    /* Check if array is empty (all requests were notifications):
     * no response body is produced at all. */
    count = json_array_get_length (res_array);
    if (!count) {
      json_node_free (res);
      goto empty;
    }
  } else
    goto invalid;

  /* No response */
  if (!res)
    goto empty;

  /* Generate final string */
  str = melo_jsonrpc_node_to_string (res);

  /* Free parser and root node */
  json_node_free (res);
  g_object_unref (parser);

  return str;

parse_error:
  g_object_unref (parser);
  return melo_jsonrpc_build_error_str (MELO_JSONRPC_ERROR_PARSE_ERROR,
                                       "Parse error");
invalid:
  g_object_unref (parser);
  return melo_jsonrpc_build_error_str (MELO_JSONRPC_ERROR_INVALID_REQUEST,
                                       "Invalid request");
empty:
  g_object_unref (parser);
  return NULL;
}
/* Evaluate JsonPath @pathstr against @parser's document and print the
 * matched value(s) to @prn; *@n_objects counts the values emitted.
 *
 * Array matches are unwrapped: nested arrays are descended via the
 * `goto repeat` loop, object elements are expanded with
 * excavate_json_object(), and scalar elements are printed one per line.
 *
 * Returns 0 on success or a gretl error code (E_DATA) on failure. */
static int
real_json_get (JsonParser *parser, const char *pathstr,
               int *n_objects, PRN *prn)
{
    GError *gerr = NULL;
    JsonNode *match, *node;
    JsonPath *path;
    GType ntype;
    int err = 0;

    *n_objects = 0;

    node = json_parser_get_root(parser);

    if (node == NULL || json_node_is_null(node)) {
	gretl_errmsg_set("jsonget: got null root node");
	return E_DATA;
    }

    path = json_path_new();

    if (!json_path_compile(path, pathstr, &gerr)) {
	if (gerr != NULL) {
	    gretl_errmsg_sprintf("jsonget: failed to compile JsonPath: %s",
				 gerr->message);
	    g_error_free(gerr);
	} else {
	    gretl_errmsg_set("jsonget: failed to compile JsonPath");
	}
	g_object_unref(path);
	return E_DATA;
    }

    match = json_path_match(path, node);

    if (null_node(match)) {
	/* FIXME : maybe return empty string? */
	g_object_unref(path);
	return E_DATA;
    }

    /* in case we get floating-point output */
    gretl_push_c_numeric_locale();

    if (JSON_NODE_HOLDS_ARRAY(match)) {
	/* json_path_match() wraps results in an array; unwrap it. */
	JsonArray *array = json_node_get_array(match);
	int len = 0, index = 0;

	if (non_empty_array(array)) {
	    len = json_array_get_length(array);
	    node = json_array_get_element(array, index);
	} else {
	    node = NULL;
	}

    repeat:
	/* Re-entered (via goto) for each nested array level and each
	 * object element of the current array. */
	if (null_node(node)) {
	    gretl_errmsg_set("jsonget: failed to match JsonPath");
	    ntype = 0;
	    err = E_DATA;
	    goto bailout;
	} else {
	    ntype = json_node_get_value_type(node);
	}

	if (node != NULL && !handled_type(ntype)) {
	    if (JSON_NODE_HOLDS_ARRAY(node)) {
		/* recurse on array type */
		array = json_node_get_array(node);
		if (non_empty_array(array)) {
		    node = json_array_get_element(array, 0);
		    goto repeat;
		}
	    } else if (json_node_get_node_type(node) == JSON_NODE_OBJECT) {
		err = excavate_json_object(node, n_objects, prn);
		if (!err) {
		    /* Advance to the next element of the outer array. */
		    if (index < len - 1) {
			node = json_array_get_element(array, ++index);
			goto repeat;
		    }
		}
	    } else {
		gretl_errmsg_sprintf("jsonget: unhandled array type '%s'",
				     g_type_name(ntype));
		err = E_DATA;
	    }
	} else if (array != NULL) {
	    /* Scalar elements: print each, newline-separated when
	     * there is more than one. */
	    int i, n = json_array_get_length(array);

	    for (i=0; i<n && !err; i++) {
		node = json_array_get_element(array, i);
		err = output_json_node_value(node, prn);
		if (!err) {
		    *n_objects += 1;
		    if (n > 1) {
			pputc(prn, '\n');
		    }
		}
	    }
	}
    } else {
	/* not an array-holding node */
	err = output_json_node_value(match, prn);
	if (!err) {
	    *n_objects += 1;
	}
    }

 bailout:

    gretl_pop_c_numeric_locale();
    json_node_free(match);
    g_object_unref(path);

    return err;
}
/*
 * cb_mini_tweet_parse_entities:
 * @t: the #CbMiniTweet being populated.
 * @status: the tweet's JSON object as delivered by the Twitter API.
 *
 * Parses the "entities" of @status -- URLs, hashtags, user mentions and
 * attached media -- into @t->entities and @t->medias.  Reply mentions
 * that appear before @t->display_range_start are split off into
 * @t->reply_users instead of becoming text entities.  Media objects are
 * collected from both "entities" and "extended_entities", with
 * duplicates removed, and a video replaces a previously-added image
 * whose url matches the video's thumbnail.  Finally the text entities
 * are sorted by their start index.
 */
void cb_mini_tweet_parse_entities (CbMiniTweet *t, JsonObject *status)
{
  JsonObject *extended_obj = status;
  JsonObject *entities;
  JsonArray *urls;
  JsonArray *hashtags;
  JsonArray *user_mentions;
  JsonArray *media_arrays[2];
  int media_count;
  guint i, p;
  int url_index = 0;          /* next free slot in t->entities */
  guint n_media_arrays = 0;
  guint n_reply_users = 0;
  guint non_reply_mentions = 0;
  int max_entities;
  gboolean direct_duplicate = FALSE;

  /* Prefer the full-length entities of an extended tweet if present */
  if (json_object_has_member (status, "extended_tweet"))
    extended_obj = json_object_get_object_member (status, "extended_tweet");

  entities = json_object_get_object_member (extended_obj, "entities");
  urls = json_object_get_array_member (entities, "urls");
  hashtags = json_object_get_array_member (entities, "hashtags");
  user_mentions = json_object_get_array_member (entities, "user_mentions");
  media_count = json_object_get_member_size (entities, "media");
  if (json_object_has_member (status, "extended_entities"))
    media_count += json_object_get_member_size (json_object_get_object_member (status, "extended_entities"), "media");

  if (json_object_has_member (status, "in_reply_to_status_id") &&
      !json_object_get_null_member (status, "in_reply_to_status_id"))
    {
      guint reply_index = 0;
      gint64 reply_to_user_id = 0;

      reply_to_user_id = json_object_get_int_member (status, "in_reply_to_user_id");

      /* Check how many of the user mentions are reply mentions */
      t->reply_id = json_object_get_int_member (status, "in_reply_to_status_id");
      for (i = 0, p = json_array_get_length (user_mentions); i < p; i ++)
        {
          JsonObject *mention = json_node_get_object (json_array_get_element (user_mentions, i));
          JsonArray *indices = json_object_get_array_member (mention, "indices");
          gint64 user_id = json_object_get_int_member (mention, "id");

          /* Mentions that end before the displayed text are reply prefixes */
          if (json_array_get_int_element (indices, 1) <= t->display_range_start)
            n_reply_users ++;
          else
            break;

          /* The first prefix mention may be the replied-to user itself */
          if (i == 0 && user_id == reply_to_user_id)
            direct_duplicate = TRUE;
        }

      /* Reserve one extra slot for the replied-to user when they are
         not already the first prefix mention */
      if (!direct_duplicate)
        n_reply_users ++;

      t->reply_users = g_new0 (CbUserIdentity, n_reply_users);
      t->n_reply_users = n_reply_users;

      if (!direct_duplicate)
        {
          t->reply_users[0].id = reply_to_user_id;
          t->reply_users[0].screen_name = g_strdup (json_object_get_string_member (status, "in_reply_to_screen_name"));
          t->reply_users[0].user_name = g_strdup ("");
          reply_index = 1;
        }

      /* Now fill ->reply_users. The very first entry is always the user this tweet
       * *actually* replies to. */
      for (i = 0; i < n_reply_users - (direct_duplicate ? 0 : 1); i ++)
        {
          JsonObject *mention = json_node_get_object (json_array_get_element (user_mentions, i));

          t->reply_users[reply_index].id = json_object_get_int_member (mention, "id");
          t->reply_users[reply_index].screen_name = g_strdup (json_object_get_string_member (mention, "screen_name"));
          t->reply_users[reply_index].user_name = g_strdup (json_object_get_string_member (mention, "name"));
          reply_index ++;
        }

      non_reply_mentions = n_reply_users - 1;
    }

  /* Upper bounds; actual counts are tracked in url_index / t->n_medias */
  max_entities = json_array_get_length (urls) +
                 json_array_get_length (hashtags) +
                 json_array_get_length (user_mentions) - non_reply_mentions +
                 media_count;
  /* Every url could turn out to be a media candidate as well */
  media_count += (int)json_array_get_length (urls);

  t->medias = g_new0 (CbMedia*, media_count);
  t->entities = g_new0 (CbTextEntity, max_entities);

  /*
   * TODO: display_text and tooltip_text are often the same here, can we just set them to the
   * same value and only free one?
   */

  /* URLS */
  for (i = 0, p = json_array_get_length (urls); i < p; i ++)
    {
      JsonObject *url = json_node_get_object (json_array_get_element (urls, i));
      const char *expanded_url = json_object_get_string_member (url, "expanded_url");
      JsonArray *indices;

      if (is_media_candidate (expanded_url))
        {
          t->medias[t->n_medias] = cb_media_new ();
          t->medias[t->n_medias]->url = g_strdup (expanded_url);
          t->medias[t->n_medias]->type = cb_media_type_from_url (expanded_url);
          t->medias[t->n_medias]->target_url = g_strdup (expanded_url);
          t->n_medias ++;
        }

      indices = json_object_get_array_member (url, "indices");
      t->entities[url_index].from = json_array_get_int_element (indices, 0);
      t->entities[url_index].to = json_array_get_int_element (indices, 1);
      t->entities[url_index].display_text = cb_utils_escape_ampersands (json_object_get_string_member (url, "display_url"));
      t->entities[url_index].tooltip_text = cb_utils_escape_ampersands (expanded_url);
      t->entities[url_index].target = cb_utils_escape_ampersands (expanded_url);
      url_index ++;
    }

  /* HASHTAGS */
  for (i = 0, p = json_array_get_length (hashtags); i < p; i ++)
    {
      JsonObject *hashtag = json_node_get_object (json_array_get_element (hashtags, i));
      JsonArray *indices = json_object_get_array_member (hashtag, "indices");
      const char *text = json_object_get_string_member (hashtag, "text");

      t->entities[url_index].from = json_array_get_int_element (indices, 0);
      t->entities[url_index].to = json_array_get_int_element (indices, 1);
      t->entities[url_index].display_text = g_strdup_printf ("#%s", text);
      t->entities[url_index].tooltip_text = g_strdup_printf ("#%s", text);
      t->entities[url_index].target = NULL;
      url_index ++;
    }

  /* USER MENTIONS -- skip the reply-prefix mentions handled above */
  if (direct_duplicate)
    i = n_reply_users;
  else
    i = n_reply_users == 0 ? 0 : n_reply_users - 1;
  for (p = json_array_get_length (user_mentions); i < p; i ++)
    {
      JsonObject *mention = json_node_get_object (json_array_get_element (user_mentions, i));
      JsonArray *indices = json_object_get_array_member (mention, "indices");
      const char *screen_name = json_object_get_string_member (mention, "screen_name");
      const char *id_str = json_object_get_string_member (mention, "id_str");

      t->entities[url_index].from = json_array_get_int_element (indices, 0);
      t->entities[url_index].to = json_array_get_int_element (indices, 1);
      t->entities[url_index].display_text = g_strdup_printf ("@%s", screen_name);
      t->entities[url_index].tooltip_text = cb_utils_escape_ampersands (json_object_get_string_member (mention, "name"));
      t->entities[url_index].target = g_strdup_printf ("@%s/@%s", id_str, screen_name);
      url_index ++;
    }

  /* MEDIA -- text entities for inline media links */
  if (json_object_has_member (entities, "media"))
    {
      JsonArray *medias = json_object_get_array_member (entities, "media");

      for (i = 0, p = json_array_get_length (medias); i < p; i ++)
        {
          JsonObject *url = json_node_get_object (json_array_get_element (medias, i));
          JsonArray *indices = json_object_get_array_member (url, "indices");
          char *url_str = cb_utils_escape_ampersands (json_object_get_string_member (url, "url"));
          int k;
          gboolean duplicate = FALSE;

          /* Check for duplicates */
          for (k = 0; k < url_index; k ++)
            {
              const char *target = t->entities[k].target;
              if (target != NULL && strcmp (target, url_str) == 0)
                {
                  duplicate = TRUE;
                  break;
                }
            }

          if (duplicate)
            {
              g_free (url_str);
              continue;
            }

          t->entities[url_index].from = json_array_get_int_element (indices, 0);
          t->entities[url_index].to = json_array_get_int_element (indices, 1);
          t->entities[url_index].display_text = cb_utils_escape_ampersands (json_object_get_string_member (url, "display_url"));
          t->entities[url_index].target = url_str;
          url_index ++;
        }
    }

  /* entities->media and extended_entities contain exactly the same media objects,
     but extended_entities is not always present, and entities->media
     doesn't contain all the attached media, so parse both the same way... */
  if (json_object_has_member (entities, "media"))
    {
      media_arrays[n_media_arrays] = json_object_get_array_member (entities, "media");
      n_media_arrays ++;
    }
  if (json_object_has_member (status, "extended_entities"))
    {
      media_arrays[n_media_arrays] = json_object_get_array_member (json_object_get_object_member (status, "extended_entities"), "media");
      n_media_arrays ++;
    }

  for (i = 0; i < n_media_arrays; i ++)
    {
      guint x, k;

      for (x = 0, p = json_array_get_length (media_arrays[i]); x < p; x ++)
        {
          JsonObject *media_obj = json_node_get_object (json_array_get_element (media_arrays[i], x));
          const char *media_type = json_object_get_string_member (media_obj, "type");

          if (strcmp (media_type, "photo") == 0)
            {
              const char *url = json_object_get_string_member (media_obj, "media_url");
              gboolean dup = FALSE;

              /* Remove duplicates */
              for (k = 0; k < t->n_medias; k ++)
                {
                  if (t->medias[k] != NULL && strcmp (t->medias[k]->url, url) == 0)
                    {
                      dup = TRUE;
                      break;
                    }
                }

              if (dup)
                continue;

              if (is_media_candidate (url))
                {
                  t->medias[t->n_medias] = cb_media_new ();
                  t->medias[t->n_medias]->type = CB_MEDIA_TYPE_IMAGE;
                  t->medias[t->n_medias]->url = g_strdup (url);
                  /* ":orig" requests the full-resolution variant */
                  t->medias[t->n_medias]->target_url = g_strdup_printf ("%s:orig", url);
                  if (json_object_has_member (media_obj, "sizes"))
                    {
                      JsonObject *sizes = json_object_get_object_member (media_obj, "sizes");
                      JsonObject *medium = json_object_get_object_member (sizes, "medium");

                      t->medias[t->n_medias]->width = json_object_get_int_member (medium, "w");
                      t->medias[t->n_medias]->height = json_object_get_int_member (medium, "h");
                    }
                  t->n_medias ++;
                }
            }
          else if (strcmp (media_type, "video") == 0 ||
                   strcmp (media_type, "animated_gif") == 0)
            {
              JsonObject *video_info = json_object_get_object_member (media_obj, "video_info");
              JsonArray *variants = json_object_get_array_member (video_info, "variants");
              JsonObject *variant = NULL;
              int thumb_width = -1;
              int thumb_height = -1;
              guint q;

              if (json_object_has_member (media_obj, "sizes"))
                {
                  JsonObject *sizes = json_object_get_object_member (media_obj, "sizes");
                  JsonObject *medium = json_object_get_object_member (sizes, "medium");

                  thumb_width = json_object_get_int_member (medium, "w");
                  thumb_height = json_object_get_int_member (medium, "h");
                }

              /* Prefer the HLS stream variant when available */
              for (k = 0, q = json_array_get_length (variants); k < q; k ++)
                {
                  JsonObject *v = json_node_get_object (json_array_get_element (variants, k));
                  if (strcmp (json_object_get_string_member (v, "content_type"), "application/x-mpegURL") == 0)
                    {
                      variant = v;
                      break;
                    }
                }

              if (variant == NULL && json_array_get_length (variants) > 0)
                variant = json_node_get_object (json_array_get_element (variants, 0));

              if (variant != NULL)
                {
                  guint n_media = t->n_medias;
                  const char *thumb_url = json_object_get_string_member (media_obj, "media_url");

                  /* Some tweets have both a video and a thumbnail for that video attached. The tweet json
                   * will list the image first. The url of the image and the thumb_url of the video will match */
                  for (k = 0; k < t->n_medias; k ++)
                    {
                      if (t->medias[k] != NULL &&
                          t->medias[k]->type == CB_MEDIA_TYPE_IMAGE &&
                          strcmp (t->medias[k]->url, thumb_url) == 0)
                        {
                          /* Replace this media */
                          g_object_unref (t->medias[k]);
                          n_media = k;
                          break;
                        }
                    }

                  t->medias[n_media] = cb_media_new ();
                  t->medias[n_media]->url = g_strdup (json_object_get_string_member (variant, "url"));
                  t->medias[n_media]->thumb_url = g_strdup (thumb_url);
                  t->medias[n_media]->type = CB_MEDIA_TYPE_TWITTER_VIDEO;
                  t->medias[n_media]->width = thumb_width;
                  t->medias[n_media]->height = thumb_height;

                  /* Only grow the count when we did not replace an image */
                  if (n_media == t->n_medias)
                    t->n_medias ++;
                }
            }
          else
            {
              g_debug ("Unhandled media type: %s", media_type);
            }
        }
    }

  t->n_entities = url_index;

#if 0
  g_debug ("Wasted entities: %d", max_entities - t->n_entities);
  g_debug ("Wasted media   : %d", media_count - t->n_medias);
#endif

  if (t->n_medias > 0)
    cb_media_downloader_load_all (cb_media_downloader_get_default (), t);

  if (t->n_entities > 0)
    {
      guint i, k;
      /* Sort entities.
       * (O(n^2) exchange sort; entity counts per tweet are tiny.) */
      for (i = 0; i < t->n_entities; i ++)
        for (k = 0; k < t->n_entities; k++)
          if (t->entities[i].from < t->entities[k].from)
            {
              CbTextEntity tmp = { 0 };
              cb_text_entity_copy (&t->entities[i], &tmp);
              cb_text_entity_copy (&t->entities[k], &t->entities[i]);
              cb_text_entity_copy (&tmp, &t->entities[k]);
              cb_text_entity_free (&tmp);
            }
    }
}
/*
 * process_includes:
 * @self: compose context holding the stack of treefile directories.
 * @treefile_path: path of the treefile currently being processed.
 * @depth: current include-recursion depth (0 for the top-level file).
 * @root: (inout): the treefile's root object; inherited members from
 *        included parents are merged into it.
 * @cancellable: a #GCancellable.
 * @error: return location for a #GError.
 *
 * Recursively resolves the "include" member of a treefile: the parent
 * file is loaded and processed first, then its members are merged into
 * @root.  Members the child does not define are copied from the parent;
 * arrays present in both are concatenated (parent elements first); any
 * other member present in both keeps the child's value.  The "include"
 * member itself is removed after merging.  Recursion is capped at 50
 * levels to catch include cycles.
 *
 * Returns: %TRUE on success, %FALSE with @error set otherwise.
 */
static gboolean
process_includes (RpmOstreeTreeComposeContext *self,
                  GFile *treefile_path,
                  guint depth,
                  JsonObject *root,
                  GCancellable *cancellable,
                  GError **error)
{
  gboolean ret = FALSE;
  const char *include_path;
  const guint maxdepth = 50;

  if (depth > maxdepth)
    {
      g_set_error (error, G_IO_ERROR, G_IO_ERROR_FAILED,
                   "Exceeded maximum include depth of %u", maxdepth);
      goto out;
    }

  {
    g_autoptr(GFile) parent = g_file_get_parent (treefile_path);
    gboolean existed = FALSE;
    /* Only push the directory if it differs from the current top of
       the context-dir stack, to avoid consecutive duplicates */
    if (self->treefile_context_dirs->len > 0)
      {
        GFile *prev = self->treefile_context_dirs->pdata[self->treefile_context_dirs->len-1];
        if (g_file_equal (parent, prev))
          existed = TRUE;
      }
    if (!existed)
      {
        g_ptr_array_add (self->treefile_context_dirs, parent);
        parent = NULL; /* Transfer ownership */
      }
  }

  if (!_rpmostree_jsonutil_object_get_optional_string_member (root, "include", &include_path, error))
    goto out;

  if (include_path)
    {
      g_autoptr(GFile) treefile_dirpath = g_file_get_parent (treefile_path);
      g_autoptr(GFile) parent_path = g_file_resolve_relative_path (treefile_dirpath, include_path);
      glnx_unref_object JsonParser *parent_parser = json_parser_new ();
      JsonNode *parent_rootval;
      JsonObject *parent_root;
      GList *members;
      GList *iter;

      if (!json_parser_load_from_file (parent_parser,
                                       gs_file_get_path_cached (parent_path),
                                       error))
        goto out;

      parent_rootval = json_parser_get_root (parent_parser);
      if (!JSON_NODE_HOLDS_OBJECT (parent_rootval))
        {
          g_set_error (error, G_IO_ERROR, G_IO_ERROR_FAILED,
                       "Treefile root is not an object");
          goto out;
        }
      parent_root = json_node_get_object (parent_rootval);

      /* Resolve the parent's own includes before merging it in */
      if (!process_includes (self, parent_path, depth + 1, parent_root,
                             cancellable, error))
        goto out;

      members = json_object_get_members (parent_root);
      for (iter = members; iter; iter = iter->next)
        {
          const char *name = iter->data;
          JsonNode *parent_val = json_object_get_member (parent_root, name);
          JsonNode *val = json_object_get_member (root, name);

          g_assert (parent_val);

          if (!val)
            /* Child doesn't override: inherit the parent's member */
            json_object_set_member (root, name, json_node_copy (parent_val));
          else
            {
              JsonNodeType parent_type = json_node_get_node_type (parent_val);
              JsonNodeType child_type = json_node_get_node_type (val);
              if (parent_type != child_type)
                {
                  g_set_error (error, G_IO_ERROR, G_IO_ERROR_FAILED,
                               "Conflicting element type of '%s'", name);
                  goto out;
                }
              if (child_type == JSON_NODE_ARRAY)
                {
                  /* Arrays are concatenated: parent elements first */
                  JsonArray *parent_array = json_node_get_array (parent_val);
                  JsonArray *child_array = json_node_get_array (val);
                  JsonArray *new_child = json_array_new ();
                  guint i, len;

                  len = json_array_get_length (parent_array);
                  for (i = 0; i < len; i++)
                    json_array_add_element (new_child, json_node_copy (json_array_get_element (parent_array, i)));
                  len = json_array_get_length (child_array);
                  for (i = 0; i < len; i++)
                    json_array_add_element (new_child, json_node_copy (json_array_get_element (child_array, i)));
                  json_object_set_array_member (root, name, new_child);
                }
              /* Any other duplicated member keeps the child's value */
            }
        }

      json_object_remove_member (root, "include");
    }

  ret = TRUE;
 out:
  return ret;
}
static gboolean catch_resource_group_api_parse_objects (CatchResourceGroup *group, JsonNode *result, guint *result_offset, guint *n_resources, GError **error) { CatchResourceGroupPrivate *priv; CatchResource *resource; JsonObject *obj; JsonArray *array; JsonNode *element; gboolean set_first_page = FALSE; gboolean ret = FALSE; GType resource_type; guint i; guint length; guint offset = 0; ENTRY; g_return_val_if_fail(CATCH_IS_RESOURCE_GROUP(group), FALSE); g_return_val_if_fail(result != NULL, FALSE); priv = group->priv; g_object_freeze_notify(G_OBJECT(group)); if (!JSON_NODE_HOLDS_OBJECT(result) || !(obj = json_node_get_object(result)) || !json_object_has_member(obj, "objects") || !JSON_NODE_HOLDS_ARRAY(json_object_get_member(obj, "objects")) || !(array = json_object_get_array_member(obj, "objects"))) { g_set_error(error, CATCH_API_ERROR, CATCH_API_ERROR_BAD_RESPONSE, _("The resonse did not contain the objects array.")); GOTO(failure); } if (json_object_has_member(obj, "count") && JSON_NODE_HOLDS_VALUE(json_object_get_member(obj, "count"))) { priv->count = json_object_get_int_member(obj, "count"); set_first_page = TRUE; g_object_notify(G_OBJECT(group), "count"); } if (json_object_has_member(obj, "offset") && JSON_NODE_HOLDS_VALUE(json_object_get_member(obj, "offset"))) { offset = json_object_get_int_member(obj, "offset"); } length = json_array_get_length(array); for (i = 0; i < length; i++) { element = json_array_get_element(array, i); if (!(resource_type = object_peek_type(element))) { g_set_error(error, CATCH_API_ERROR, CATCH_API_ERROR_BAD_RESPONSE, _("The JSON object did not contain a \"type\" field.")); GOTO(failure); } resource = g_object_new(resource_type, NULL); g_assert(CATCH_IS_RESOURCE(resource)); if (!catch_resource_load_from_json(resource, element, error)) { g_object_unref(resource); GOTO(failure); } catch_resource_group_set_resource(CATCH_RESOURCE_GROUP(group), offset + i, resource); g_object_unref(resource); } if (set_first_page) { priv->first_page_size 
= length; } if (result_offset) { *result_offset = offset; } if (n_resources) { *n_resources = length; } ret = TRUE; failure: g_object_thaw_notify(G_OBJECT(group)); RETURN(ret); }
/*
 * ide_langserv_rename_provider_rename_cb:
 *
 * Completion callback for the language server "rename" request.  Expects
 * a reply containing a "changes" object mapping document URIs to arrays
 * of { range, newText } edits, converts each into an #IdeProjectEdit and
 * completes @user_data (a #GTask) with the resulting #GPtrArray.
 */
static void
ide_langserv_rename_provider_rename_cb (GObject *object, GAsyncResult *result, gpointer user_data)
{
  IdeLangservClient *client = (IdeLangservClient *)object;
  IdeLangservRenameProvider *self;
  g_autoptr(JsonNode) return_value = NULL;
  g_autoptr(GError) error = NULL;
  g_autoptr(GTask) task = user_data;
  g_autoptr(GPtrArray) ret = NULL;
  JsonObject *changes_by_uri = NULL;
  JsonObjectIter iter;
  const gchar *uri;
  IdeContext *context;
  JsonNode *changes;

  IDE_ENTRY;

  g_assert (IDE_IS_LANGSERV_CLIENT (client));
  g_assert (G_IS_ASYNC_RESULT (result));
  g_assert (G_IS_TASK (task));

  self = g_task_get_source_object (task);
  g_assert (IDE_IS_LANGSERV_RENAME_PROVIDER (self));

  if (!ide_langserv_client_call_finish (client, result, &return_value, &error))
    {
      g_task_return_error (task, g_steal_pointer (&error));
      IDE_EXIT;
    }

  /* NOTE(review): on extraction failure the task is never completed,
   * so a caller awaiting it may wait forever -- confirm intended */
  if (!JCON_EXTRACT (return_value, "changes", JCONE_OBJECT (changes_by_uri)))
    IDE_EXIT;

  context = ide_object_get_context (IDE_OBJECT (self));

  ret = g_ptr_array_new_with_free_func (g_object_unref);

  /* Walk every uri -> [edit, ...] pair in the "changes" object */
  json_object_iter_init (&iter, changes_by_uri);

  while (json_object_iter_next (&iter, &uri, &changes))
    {
      g_autoptr(GFile) gfile = g_file_new_for_uri (uri);
      g_autoptr(IdeFile) ifile = ide_file_new (context, gfile);
      JsonArray *array;
      guint length;

      if (!JSON_NODE_HOLDS_ARRAY (changes))
        continue;

      array = json_node_get_array (changes);
      length = json_array_get_length (array);

      for (guint i = 0; i < length; i++)
        {
          JsonNode *change = json_array_get_element (array, i);
          g_autoptr(IdeSourceLocation) begin_location = NULL;
          g_autoptr(IdeSourceLocation) end_location = NULL;
          g_autoptr(IdeSourceRange) range = NULL;
          g_autoptr(IdeProjectEdit) edit = NULL;
          const gchar *new_text = NULL;
          gboolean success;
          struct {
            gint line;
            gint column;
          } begin, end;

          /* Pull range.start/end line+character and newText from the
             LSP TextEdit object; skip malformed entries */
          success = JCON_EXTRACT (change,
            "range", "{",
              "start", "{",
                "line", JCONE_INT (begin.line),
                "character", JCONE_INT (begin.column),
              "}",
              "end", "{",
                "line", JCONE_INT (end.line),
                "character", JCONE_INT (end.column),
              "}",
            "}",
            "newText", JCONE_STRING (new_text)
          );

          if (!success)
            continue;

          begin_location = ide_source_location_new (ifile, begin.line, begin.column, 0);
          end_location = ide_source_location_new (ifile, end.line, end.column, 0);
          range = ide_source_range_new (begin_location, end_location);

          edit = g_object_new (IDE_TYPE_PROJECT_EDIT,
                               "range", range,
                               "replacement", new_text,
                               NULL);

          g_ptr_array_add (ret, g_steal_pointer (&edit));
        }
    }

  g_task_return_pointer (task, g_steal_pointer (&ret), (GDestroyNotify)g_ptr_array_unref);

  IDE_EXIT;
}
static void om_got_events(OmegleAccount *oma, gchar *response, gsize len, gpointer userdata) { //[["waiting"], ["connected"]] gchar *who = userdata; const gchar *message; const gchar *event_type; JsonParser *parser; JsonNode *rootnode, *currentnode; JsonArray *array, *current; guint i; purple_debug_info("omegle", "got events: %s\n", response?response:"(null)"); if (!response || g_str_equal(response, "null")) { g_free(who); return; } parser = json_parser_new(); json_parser_load_from_data(parser, response, len, NULL); rootnode = json_parser_get_root(parser); if (!rootnode) { g_object_unref(parser); return; } array = json_node_get_array(rootnode); for(i=0; i<json_array_get_length(array); i++) { currentnode = json_array_get_element(array, i); current = json_node_get_array(currentnode); event_type = json_node_get_string(json_array_get_element(current, 0)); if (!event_type) { continue; } else if (g_str_equal(event_type, "waiting")) { serv_got_im(oma->pc, who, "Looking for someone you can chat with. Hang on.", PURPLE_MESSAGE_SYSTEM, time(NULL)); } else if (g_str_equal(event_type, "connected")) { serv_got_im(oma->pc, who, "You're now chatting with a random stranger. Say hi!", PURPLE_MESSAGE_SYSTEM, time(NULL)); } else if (g_str_equal(event_type, "gotMessage")) { //[["gotMessage","message goes here"]] message = json_node_get_string(json_array_get_element(current, 1)); if (message) serv_got_im(oma->pc, who, message, PURPLE_MESSAGE_RECV, time(NULL)); } else if (g_str_equal(event_type, "typing")) { serv_got_typing(oma->pc, who, 10, PURPLE_TYPING); } else if (g_str_equal(event_type, "stoppedTyping")) { serv_got_typing(oma->pc, who, 10, PURPLE_TYPED); } else if (g_str_equal(event_type, "strangerDisconnected")) { serv_got_im(oma->pc, who, "Your conversational partner has disconnected", PURPLE_MESSAGE_SYSTEM, time(NULL)); } } om_fetch_events(oma, g_strdup(who)); g_free(who); g_object_unref(parser); }
/*
 * cockpit_package_listing:
 * @json: (out) (allow-none): on return, a newly created #JsonArray
 *        describing each package (ids + manifest), owned by the caller.
 *
 * Builds the package listing: name -> #CockpitPackage, with extra hash
 * entries for each manifest "alias" and for each package checksum.
 * The "alias" member is removed from the manifest while being processed.
 *
 * Returns: (transfer full): the listing table; values hold package refs.
 */
GHashTable *
cockpit_package_listing (JsonArray **json)
{
  JsonArray *root = NULL;
  GHashTable *listing;
  CockpitPackage *package;
  GHashTable *ids;
  JsonObject *object;
  JsonArray *id;
  GList *names, *l;
  GList *packages;
  const gchar *name;
  JsonNode *node;
  JsonArray *array;
  guint i, length;

  /* Keys are owned by the package values, hence no key destroy func */
  listing = g_hash_table_new_full (g_str_hash, g_str_equal,
                                   NULL, cockpit_package_unref);

  build_package_listing (listing);

  /* Add aliases to the listing */
  packages = g_hash_table_get_values (listing);
  packages = g_list_sort (packages, compar_packages);
  /* Hold refs while we mutate the table during iteration */
  g_list_foreach (packages, (GFunc)cockpit_package_ref, NULL);

  for (l = packages; l != NULL; l = g_list_next (l))
    {
      package = l->data;
      node = json_object_get_member (package->manifest, "alias");
      if (node)
        {
          /*
           * Process and remove "alias" from the manifest, as it results in
           * confusing and duplicated information for the front end.
           */
          package->alias = node = json_node_copy (node);
          json_object_remove_member (package->manifest, "alias");
          if (JSON_NODE_HOLDS_ARRAY (node))
            {
              array = json_node_get_array (node);
              length = json_array_get_length (array);
              for (i = 0; i < length; i++)
                add_alias_to_listing (listing, package, json_array_get_element (array, i));
            }
          else
            {
              add_alias_to_listing (listing, package, node);
            }
        }
    }
  g_list_free_full (packages, (GDestroyNotify)cockpit_package_unref);

  /* Now wrap up the checksums */
  finish_checksums (listing);

  /* Add checksums to the listing */
  packages = g_hash_table_get_values (listing);
  g_list_foreach (packages, (GFunc)cockpit_package_ref, NULL);
  for (l = packages; l != NULL; l = g_list_next (l))
    {
      package = l->data;
      if (package->checksum && !g_hash_table_contains (listing, package->checksum))
        {
          /* The checksum string is owned by the package it maps to */
          g_hash_table_replace (listing, package->checksum, cockpit_package_ref (package));
          g_debug ("%s: package has checksum: %s", package->name, package->checksum);
        }
    }
  g_list_free_full (packages, (GDestroyNotify)cockpit_package_unref);

  /* Build JSON packages block */
  if (json)
    {
      *json = root = json_array_new ();
      /* Maps package pointer -> its "id" array, so each package gets
         exactly one JSON object no matter how many names refer to it */
      ids = g_hash_table_new (g_direct_hash, g_direct_equal);

      names = g_hash_table_get_keys (listing);
      names = g_list_sort (names, (GCompareFunc)strcmp);

      for (l = names; l != NULL; l = g_list_next (l))
        {
          name = l->data;
          package = g_hash_table_lookup (listing, name);
          id = g_hash_table_lookup (ids, package);
          if (!id)
            {
              object = json_object_new ();
              id = json_array_new();

              /* The actual package name always comes first */
              json_object_set_array_member (object, "id", id);
              json_array_add_string_element (id, package->name);
              g_hash_table_insert (ids, package, id);

              json_object_set_object_member (object, "manifest", json_object_ref (package->manifest));
              json_array_add_object_element (root, object);
            }

          /* Other ways to refer to the package */
          if (!g_str_equal (name, package->name))
            json_array_add_string_element (id, name);
        }

      g_list_free (names);
      g_hash_table_destroy (ids);
    }

  return listing;
}
/*
 * fb_get_buddies_friend_list:
 * @fba: the Facebook account state.
 * @uid: the Facebook user id of the buddy.
 * @friend_list_ids: (allow-none): array of friend-list id strings the
 *        server says @uid belongs to, or %NULL for the default group.
 *
 * Synchronizes the local buddy-list entries for @uid with the server's
 * friend-list membership: existing buddies are kept, missing ones are
 * created via add_buddy(), and buddies in groups the server no longer
 * reports are removed.
 *
 * Returns: (transfer container): a #GList of the PurpleBuddy entries
 * for @uid after synchronization.
 */
GList *fb_get_buddies_friend_list (FacebookAccount *fba,
		const gchar *uid, JsonArray *friend_list_ids)
{
	GSList *buddies;
	GSList *cur;
	GHashTable *cur_groups;
	int i;
	GList *final_buddies, *cur_buddy;
	PurpleGroup *fb_group;
	PurpleBuddy *buddy;

	final_buddies = NULL;

	buddies = purple_find_buddies(fba->account, uid);

	// If we're already in the buddy list, stop.  Ignore FB info because
	// it will be incorrect.
	if (atoll(uid) == fba->uid && buddies != NULL) {
		purple_debug_info("facebook",
				"already have buddies for self, not adding\n");
		for (cur = buddies; cur != NULL; cur = cur->next) {
			final_buddies = g_list_append(
					final_buddies, cur->data);
		}
		g_slist_free(buddies);
		return final_buddies;
	}

	//Do we want to ignore groups?
	if (!purple_account_get_bool(fba->account, "facebook_use_groups", TRUE))
	{
		if (buddies != NULL)
		{
			//Copy the slist into the list
			for (cur = buddies; cur != NULL; cur = cur->next) {
				final_buddies = g_list_append(
						final_buddies, cur->data);
			}
			g_slist_free(buddies);
			return final_buddies;
		} else {
			/* Not on the list yet: create the buddy in the default group */
			buddy = purple_buddy_new(fba->account, uid, NULL);
			fb_group = purple_find_group(DEFAULT_GROUP_NAME);
			if (fb_group == NULL)
			{
				fb_group = purple_group_new(DEFAULT_GROUP_NAME);
				purple_blist_add_group(fb_group, NULL);
			}
			purple_blist_add_buddy(buddy, NULL, fb_group, NULL);
			final_buddies = g_list_append(final_buddies, buddy);
			return final_buddies;
		}
	}

	// Determine what buddies exist and what groups they are in.
	/* Keys are normalized group names (owned); values are the buddies */
	cur_groups = g_hash_table_new_full(g_str_hash, g_str_equal,
			g_free, NULL);
	for (cur = buddies; cur != NULL; cur = cur->next) {
		const gchar *group_name;
		group_name = purple_group_get_name(purple_buddy_get_group(
				(PurpleBuddy *)cur->data));
		/* NOTE(review): purple_normalize_nocase appears to return a
		 * static buffer, hence the g_strdup on insertion -- confirm */
		group_name = purple_normalize_nocase(NULL, group_name);
		g_hash_table_insert(cur_groups, g_strdup(group_name), cur->data);
	}
	g_slist_free(buddies);

	// Create/insert necessary buddies
	if (friend_list_ids) {
		for (i = 0; i < json_array_get_length(friend_list_ids); i++) {
			const gchar *friend_list_id;
			friend_list_id = json_node_get_string(
					json_array_get_element(friend_list_ids, i));
			buddy = add_buddy(fba, friend_list_id, uid, cur_groups);
			final_buddies = g_list_append(final_buddies, buddy);
		}
	} else {
		// No friend list data, so we use the default group.
		final_buddies = g_list_append(final_buddies,
			add_buddy(fba, "-1", uid, cur_groups));
	}

	// Figure out which groups/buddies are not represented.
	for (cur_buddy = final_buddies; cur_buddy != NULL;
			cur_buddy = cur_buddy->next) {
		const gchar *group_name = purple_group_get_name(purple_buddy_get_group(
				(PurpleBuddy *)cur_buddy->data));
		g_hash_table_remove(cur_groups,
				purple_normalize_nocase(NULL, group_name));
	}

	// Delete remaining buddies to maintain sync state with server.
	g_hash_table_foreach(cur_groups, destroy_buddy, fba);

	// Cleanup!
	g_hash_table_destroy(cur_groups);

	return final_buddies;
}
/*
 * spin_receive_friends_cb:
 * @url_data: the fetch handle (unused here).
 * @userp: the #PurpleConnection that requested the friend list.
 * @node: parsed JSON reply; expected to be an array of 7-element
 *        [id, name, online, away, ?, photo, ?] entries.
 * @error_message: error text when @node is %NULL.
 *
 * Parses the downloaded friend list, syncs each entry into the local
 * buddy list via spin_sync_buddy(), and removes local buddies that no
 * longer appear in the server list.
 */
static void spin_receive_friends_cb(PurpleUtilFetchUrlData* url_data,
				    gpointer userp,
				    JsonNode* node,
				    const gchar* error_message)
{
  PurpleConnection* gc = (PurpleConnection*) userp;
  if(!PURPLE_CONNECTION_IS_VALID(gc))
    return;

  if(!node)
    {
      purple_debug_error("spin","friend list error:%s\n",error_message);
      purple_connection_error_reason(gc,PURPLE_CONNECTION_ERROR_NETWORK_ERROR,
				     _("could not receive friend list"));
      return;
    }

  SpinData* spin = (SpinData*) gc->proto_data;
  PurpleAccount* account = purple_connection_get_account(gc);
  /* Marks buddies seen in the server list (buddy pointer -> 0x1) */
  GHashTable* found_buddies = g_hash_table_new(g_direct_hash,g_direct_equal);
  GSList* account_buddies = purple_find_buddies(account,NULL);

  if(!node || JSON_NODE_TYPE(node) != JSON_NODE_ARRAY)
    {
      purple_connection_error_reason
	(gc,PURPLE_CONNECTION_ERROR_NETWORK_ERROR,
	 _("invalid friend list format"));
      goto exit;
    }

  JsonArray* friends = json_node_get_array(node);
  guint i;
  for(i = 0; i < json_array_get_length(friends); ++i)
    {
      node = json_array_get_element(friends,i);
      JsonArray* entry;
      /* Each friend entry must itself be a 7-element array */
      if(JSON_NODE_TYPE(node) != JSON_NODE_ARRAY
	 || json_array_get_length(entry = json_node_get_array(node)) != 7)
	{
	  purple_debug_info("spin","invalid friend list entry\n");
	  continue;
	}
      const gchar* id = json_node_get_string(json_array_get_element(entry,0));
      const gchar* name = json_node_get_string(json_array_get_element(entry,1));
      guint online = json_node_get_int(json_array_get_element(entry,2));
      const gchar* away = json_node_get_string(json_array_get_element(entry,3));
      const gchar* photo =json_node_get_string(json_array_get_element(entry,5));
      purple_debug_info("spin","got friend info: %s %s %i %s %s\n",
			id,name,online, away,photo);
      if(!name || !away || !photo || !id)
	continue;
      PurpleBuddy* buddy = spin_sync_buddy(spin,account_buddies,id,name,
					   online,away,photo);
      g_hash_table_insert(found_buddies,buddy,(gpointer)0x1);
    }

  /* Any locally-known buddy the server didn't report gets removed */
  GSList* b;
  for(b = account_buddies; b; b = b->next)
    {
      if(!g_hash_table_lookup(found_buddies,b->data))
	{
	  spin_notify_nick_removed (spin,purple_buddy_get_name((PurpleBuddy*) b->data));
	  purple_blist_remove_buddy((PurpleBuddy*)b->data);
	}
    }

  spin_connect_add_state(spin,SPIN_STATE_GOT_INITIAL_FRIEND_LIST);

 exit:
  g_slist_free(account_buddies);
  g_hash_table_destroy(found_buddies);
}
static GPtrArray * gs_plugin_odrs_parse_reviews (GsPlugin *plugin, const gchar *data, gssize data_len, GError **error) { JsonArray *json_reviews; JsonNode *json_root; guint i; g_autoptr(JsonParser) json_parser = NULL; g_autoptr(GPtrArray) reviews = NULL; /* nothing */ if (data == NULL) { g_set_error_literal (error, GS_PLUGIN_ERROR, GS_PLUGIN_ERROR_INVALID_FORMAT, "server returned no data"); return NULL; } /* parse the data and find the array or ratings */ json_parser = json_parser_new (); if (!json_parser_load_from_data (json_parser, data, data_len, error)) { gs_utils_error_convert_json_glib (error); return NULL; } json_root = json_parser_get_root (json_parser); if (json_root == NULL) { g_set_error_literal (error, GS_PLUGIN_ERROR, GS_PLUGIN_ERROR_INVALID_FORMAT, "no root"); return NULL; } if (json_node_get_node_type (json_root) != JSON_NODE_ARRAY) { g_set_error_literal (error, GS_PLUGIN_ERROR, GS_PLUGIN_ERROR_INVALID_FORMAT, "no array"); return NULL; } /* parse each rating */ reviews = g_ptr_array_new_with_free_func ((GDestroyNotify) g_object_unref); json_reviews = json_node_get_array (json_root); for (i = 0; i < json_array_get_length (json_reviews); i++) { JsonNode *json_review; JsonObject *json_item; g_autoptr(AsReview) review = NULL; /* extract the data */ json_review = json_array_get_element (json_reviews, i); if (json_node_get_node_type (json_review) != JSON_NODE_OBJECT) { g_set_error_literal (error, GS_PLUGIN_ERROR, GS_PLUGIN_ERROR_INVALID_FORMAT, "no object type"); return NULL; } json_item = json_node_get_object (json_review); if (json_item == NULL) { g_set_error_literal (error, GS_PLUGIN_ERROR, GS_PLUGIN_ERROR_INVALID_FORMAT, "no object"); return NULL; } /* create review */ review = gs_plugin_odrs_parse_review_object (plugin, json_item); g_ptr_array_add (reviews, g_object_ref (review)); } return g_steal_pointer (&reviews); }
/**
 * json_reader_read_element:
 * @reader: a #JsonReader
 * @index_: the index of the element
 *
 * Advances the cursor of @reader to the element @index_ of the array
 * or the object at the current position.
 *
 * You can use the json_reader_get_value* family of functions to retrieve
 * the value of the element; for instance:
 *
 * |[
 * json_reader_read_element (reader, 0);
 * int_value = json_reader_get_int_value (reader);
 * ]|
 *
 * After reading the value, json_reader_end_element() should be called to
 * reposition the cursor inside the #JsonReader, e.g.:
 *
 * |[
 * json_reader_read_element (reader, 1);
 * str_value = json_reader_get_string_value (reader);
 * json_reader_end_element (reader);
 *
 * json_reader_read_element (reader, 2);
 * str_value = json_reader_get_string_value (reader);
 * json_reader_end_element (reader);
 * ]|
 *
 * If @reader is not currently on an array or an object, or if the @index_ is
 * bigger than the size of the array or the object, the #JsonReader will be
 * put in an error state until json_reader_end_element() is called.
 *
 * Return value: %TRUE on success, and %FALSE otherwise
 *
 * Since: 0.12
 */
gboolean
json_reader_read_element (JsonReader *reader,
                          guint       index_)
{
  JsonReaderPrivate *priv;

  /* Fix: use the JSON_IS_READER type-check macro; the JSON_READER cast
   * macro does not verify the instance type */
  g_return_val_if_fail (JSON_IS_READER (reader), FALSE);
  json_reader_return_val_if_error_set (reader, FALSE);

  priv = reader->priv;

  if (priv->current_node == NULL)
    priv->current_node = priv->root;

  if (!(JSON_NODE_HOLDS_ARRAY (priv->current_node) ||
        JSON_NODE_HOLDS_OBJECT (priv->current_node)))
    return json_reader_set_error (reader, JSON_READER_ERROR_NO_ARRAY,
                                  "The current node is of type '%s', but "
                                  "an array or an object was expected.",
                                  json_node_type_name (priv->current_node));

  switch (json_node_get_node_type (priv->current_node))
    {
    case JSON_NODE_ARRAY:
      {
        JsonArray *array = json_node_get_array (priv->current_node);

        if (index_ >= json_array_get_length (array))
          return json_reader_set_error (reader, JSON_READER_ERROR_INVALID_INDEX,
                                        "The index '%d' is greater than the size "
                                        "of the array at the current position.",
                                        index_);

        priv->previous_node = priv->current_node;
        priv->current_node = json_array_get_element (array, index_);
      }
      break;

    case JSON_NODE_OBJECT:
      {
        JsonObject *object = json_node_get_object (priv->current_node);
        GList *members;
        const gchar *name;

        if (index_ >= json_object_get_size (object))
          return json_reader_set_error (reader, JSON_READER_ERROR_INVALID_INDEX,
                                        "The index '%d' is greater than the size "
                                        "of the object at the current position.",
                                        index_);

        priv->previous_node = priv->current_node;
        g_free (priv->current_member);

        /* Objects are indexed by member position in insertion order */
        members = json_object_get_members (object);
        name = g_list_nth_data (members, index_);

        priv->current_node = json_object_get_member (object, name);
        priv->current_member = g_strdup (name);

        g_list_free (members);
      }
      break;

    default:
      g_assert_not_reached ();
      return FALSE;
    }

  return TRUE;
}
static gboolean catch_resource_group_api_parse_activities (CatchResourceGroup *group, JsonNode *result, guint *result_offset, guint *n_resources, GError **error) { CatchResourceGroupPrivate *priv; CatchResource *resource; JsonObject *obj; JsonArray *array; JsonNode *element; gboolean set_first_page = FALSE; gboolean ret = FALSE; guint i; guint length; guint offset = 0; ENTRY; g_return_val_if_fail(CATCH_IS_RESOURCE_GROUP(group), FALSE); g_return_val_if_fail(result != NULL, FALSE); priv = group->priv; g_object_freeze_notify(G_OBJECT(group)); if (!JSON_NODE_HOLDS_OBJECT(result) || !(obj = json_node_get_object(result)) || !json_object_has_member(obj, "activities") || !JSON_NODE_HOLDS_ARRAY(json_object_get_member(obj, "activities")) || !(array = json_object_get_array_member(obj, "activities"))) { g_set_error(error, CATCH_API_ERROR, CATCH_API_ERROR_BAD_RESPONSE, _("The resonse was invalid.")); GOTO(failure); } if (json_object_has_member(obj, "count") && JSON_NODE_HOLDS_VALUE(json_object_get_member(obj, "count"))) { priv->count = json_object_get_int_member(obj, "count"); set_first_page = TRUE; g_object_notify_by_pspec(G_OBJECT(group), gParamSpecs[PROP_COUNT]); } if (json_object_has_member(obj, "offset") && JSON_NODE_HOLDS_VALUE(json_object_get_member(obj, "offset"))) { offset = json_object_get_int_member(obj, "offset"); } length = json_array_get_length(array); for (i = 0; i < length; i++) { element = json_array_get_element(array, i); resource = g_object_new(CATCH_TYPE_ACTIVITY, NULL); if (!catch_resource_load_from_json(resource, element, error)) { g_object_unref(resource); GOTO(failure); } catch_resource_group_set_resource(CATCH_RESOURCE_GROUP(group), offset + i, resource); g_object_unref(resource); } if (set_first_page) { priv->first_page_size = length; } if (result_offset) { *result_offset = offset; } if (n_resources) { *n_resources = length; } ret = TRUE; failure: g_object_thaw_notify(G_OBJECT(group)); RETURN(ret); }
/*
 * melo_jsonrpc_get_json_node:
 * @schema_params: schema array describing the expected parameters; each
 *   element is an object with at least a "name" member and an optional
 *   boolean "required" member.
 * @params: the incoming "params" node (object or array), or %NULL.
 * @obj: destination object to fill via melo_jsonrpc_add_node(), or %NULL.
 * @array: destination array to fill via melo_jsonrpc_add_node(), or %NULL.
 * @error: location for a JSON-RPC error node; only written if still %NULL.
 *
 * Validates @params against @schema_params and converts it into @obj
 * and/or @array. Missing parameters are tolerated only when the schema
 * marks them "required": false.
 *
 * Returns: %TRUE on success (or early stop at a missing optional
 * parameter); %FALSE with *@error set on invalid request/params.
 *
 * NOTE(review): in the object branch, when a parameter is missing and
 * not required but BOTH @obj and @array are %NULL, control falls through
 * to melo_jsonrpc_add_node() with `node` pointing at the schema's
 * "required" member — presumably callers always pass one destination;
 * verify against call sites.
 */
static gboolean
melo_jsonrpc_get_json_node (JsonArray *schema_params, JsonNode *params,
                            JsonObject *obj, JsonArray *array,
                            JsonNode **error)
{
  JsonObject *schema;
  JsonNodeType type;
  JsonNode *node;
  guint count, i;

  /* Check schema */
  if (!schema_params)
    return FALSE;

  /* No params to check */
  if (!params) {
    if (error && *error == NULL)
      *error = melo_jsonrpc_build_error_node (
                                          MELO_JSONRPC_ERROR_INVALID_REQUEST,
                                          "Invalid request");
    return FALSE;
  }

  /* Get element count from schema */
  count = json_array_get_length (schema_params);

  /* Get type */
  type = json_node_get_node_type (params);

  /* Already an object */
  if (type == JSON_NODE_OBJECT) {
    const gchar *name;
    JsonObject *o;

    /* Get object */
    o = json_node_get_object (params);

    /* Parse object: look up each schema-declared parameter by name */
    for (i = 0; i < count; i++) {
      /* Get next schema object */
      schema = json_array_get_object_element (schema_params, i);
      if (!schema)
        goto failed;

      /* Get parameter name */
      name = json_object_get_string_member (schema, "name");
      if (!name)
        goto failed;

      /* Get node */
      node = json_object_get_member (o, name);
      if (!node) {
        /* Get required flag: failed if not defined or TRUE
         * (note: `node` is reused here to hold the "required" member) */
        node = json_object_get_member (schema, "required");
        if (!node || (node && json_node_get_boolean (node)))
          goto failed;

        /* When not required:
         *  - skip when converting to an object,
         *  - stop when converting to an array. */
        if (obj)
          continue;
        if (array)
          return TRUE;
      }

      /* Check node type */
      if (!melo_jsonrpc_add_node (node, schema, obj, array))
        goto failed;
    }
  } else if (type == JSON_NODE_ARRAY) {
    guint params_count;
    JsonArray *a;

    /* Get array */
    a = json_node_get_array (params);
    params_count = json_array_get_length (a);

    /* Parse array: parameters are matched to the schema by position */
    for (i = 0; i < count; i++) {
      /* Get next schema object */
      schema = json_array_get_object_element (schema_params, i);
      if (!schema)
        goto failed;

      /* No more parameters available */
      if (i >= params_count) {
        /* Get required flag: failed if not defined or TRUE */
        node = json_object_get_member (schema, "required");
        if (!node || (node && json_node_get_boolean (node)))
          goto failed;

        /* If this parameter was not required: stop conversion */
        return TRUE;
      }

      /* Get node */
      node = json_array_get_element (a, i);
      if (!node)
        goto failed;

      /* Check node type */
      if (!melo_jsonrpc_add_node (node, schema, obj, array))
        goto failed;
    }
  }

  return TRUE;

failed:
  if (error && *error == NULL)
    *error = melo_jsonrpc_build_error_node (MELO_JSONRPC_ERROR_INVALID_PARAMS,
                                            "Invalid params");
  return FALSE;
}