static void parse_csvfile_rowend(int c, void *user_data) { bgpstream_csvfile_datasource_t *csvfile_ds = (bgpstream_csvfile_datasource_t *) user_data; /* if the number of fields read is compliant with the expected file format */ if(csvfile_ds->current_field == CSVFILE_FIELDCNT) { /* check if the timestamp is acceptable */ if(csvfile_ds->timestamp > csvfile_ds->last_processed_ts && csvfile_ds->timestamp <= csvfile_ds->max_accepted_ts) { /* update max in file timestamp */ if(csvfile_ds->timestamp > csvfile_ds->max_ts_infile) { csvfile_ds->max_ts_infile = csvfile_ds->timestamp; } if(bgpstream_csvfile_datasource_filter_ok(csvfile_ds)) { csvfile_ds->num_results += bgpstream_input_mgr_push_sorted_input(csvfile_ds->input_mgr, strdup(csvfile_ds->filename), strdup(csvfile_ds->project), strdup(csvfile_ds->collector), strdup(csvfile_ds->bgp_type), csvfile_ds->filetime, csvfile_ds->time_span); } } } csvfile_ds->current_field = 0; }
/* Queue PATH (labelled TYPE) if it is configured, due for a re-check, and
 * its header differs from the one cached in HEADER.  On push, *LAST_FILETIME
 * is updated to NOW.  The checks are ordered cheapest-first so same_header()
 * (which inspects the file) runs only when necessary.
 *
 * Returns the number of inputs queued (0 or 1-ish, per the push function). */
static int singlefile_push_if_changed(bgpstream_input_mgr_t *input_mgr,
                                      char *path, char *header,
                                      const char *type, uint32_t now,
                                      uint32_t *last_filetime,
                                      uint32_t check_frequency)
{
  if (path[0] == '\0' || (now - *last_filetime) <= check_frequency ||
      same_header(path, header) != 0) {
    return 0;
  }
  *last_filetime = now;
  /* ownership of the strdup'd strings passes to the input manager */
  return bgpstream_input_mgr_push_sorted_input(
    input_mgr, strdup(path), strdup("singlefile_ds"), strdup("singlefile_ds"),
    strdup(type), *last_filetime, check_frequency);
}

/* Re-scan the configured single RIB/updates files and push either onto the
 * input queue when its content header has changed since the last check.
 *
 * @param singlefile_ds  datasource holding the file names and cached headers
 * @param input_mgr      input manager receiving any new inputs
 * @return number of inputs added to the queue
 */
int bgpstream_singlefile_datasource_update_input_queue(
  bgpstream_singlefile_datasource_t *singlefile_ds,
  bgpstream_input_mgr_t *input_mgr)
{
  bgpstream_debug("\t\tBSDS_CLIST: singlefile_ds update input queue start");

  struct timeval tv;
  gettimeofday(&tv, NULL);
  uint32_t now = tv.tv_sec;

  int queued = 0;

  /* check digest, if different (or first time) then add files to the queue */
  queued += singlefile_push_if_changed(
    input_mgr, singlefile_ds->rib_filename, singlefile_ds->rib_header, "ribs",
    now, &singlefile_ds->last_rib_filetime, RIB_FREQUENCY_CHECK);

  queued += singlefile_push_if_changed(
    input_mgr, singlefile_ds->update_filename, singlefile_ds->update_header,
    "updates", now, &singlefile_ds->last_update_filetime,
    UPDATE_FREQUENCY_CHECK);

  bgpstream_debug("\t\tBSDS_CLIST: singlefile_ds update input queue end");
  return queued;
}
/* Parse a broker "data" JSON response (already tokenized by jsmn) and push
 * every advertised dump file onto the input queue.
 *
 * NOTE(review): NEXT_TOK and the json_* helpers are macros defined elsewhere
 * in this file; they appear to advance `t` and to `goto err` on assertion
 * failure — confirm against their definitions before restructuring this
 * token walk.
 *
 * @param broker_ds  broker datasource; last_response_time is updated from
 *                   the response's "time" field
 * @param input_mgr  input manager receiving one entry per dump file
 * @param js         the raw JSON text the tokens index into
 * @param root_tok   first jsmn token (must be the root object)
 * @param count      number of tokens produced by the parser
 * @return number of dump files queued on success, ERR_RETRY on empty or
 *         malformed responses (both `retry` and `err` paths return ERR_RETRY;
 *         `err` additionally logs an invalid-JSON message)
 */
static int process_json(bgpstream_broker_datasource_t *broker_ds,
                        bgpstream_input_mgr_t *input_mgr, const char *js,
                        jsmntok_t *root_tok, size_t count)
{
  int i, j, k;
  jsmntok_t *t = root_tok + 1; /* cursor: first child of the root object */

  int arr_len, obj_len;

  /* the response must carry a "time" field; enforced at the end */
  int time_set = 0;

  int num_results = 0;

  // per-file info (reset at the start of each dumpFiles array element)
  char *url = NULL;
  size_t url_len = 0; /* current allocated size of `url` */
  int url_set = 0;
  char collector[BGPSTREAM_UTILS_STR_NAME_LEN] = "";
  int collector_set = 0;
  char project[BGPSTREAM_UTILS_STR_NAME_LEN] = "";
  int project_set = 0;
  char type[BGPSTREAM_UTILS_STR_NAME_LEN] = "";
  int type_set = 0;
  uint32_t initial_time = 0;
  int initial_time_set = 0;
  uint32_t duration = 0;
  int duration_set = 0;

  /* an empty response is treated as transient: ask the caller to retry */
  if (count == 0) {
    fprintf(stderr, "ERROR: Empty JSON response from broker\n");
    goto retry;
  }

  if (root_tok->type != JSMN_OBJECT) {
    fprintf(stderr, "ERROR: Root object is not JSON\n");
    fprintf(stderr, "INFO: JSON: %s\n", js);
    goto err;
  }

  // iterate over the children of the root object
  for (i = 0; i < root_tok->size; i++) {
    // all keys must be strings
    if (t->type != JSMN_STRING) {
      fprintf(stderr, "ERROR: Encountered non-string key: '%.*s'\n",
              t->end - t->start, js + t->start);
      goto err;
    }
    if (json_strcmp(js, t, "time") == 0) {
      /* server-side timestamp of this response */
      NEXT_TOK;
      json_type_assert(t, JSMN_PRIMITIVE);
      json_strtoul(broker_ds->last_response_time, t);
      time_set = 1;
      NEXT_TOK;
    } else if (json_strcmp(js, t, "type") == 0) {
      /* only "data" responses are handled here */
      NEXT_TOK;
      json_str_assert(js, t, "data");
      NEXT_TOK;
    } else if (json_strcmp(js, t, "error") == 0) {
      NEXT_TOK;
      if (json_isnull(js, t) == 0) { // i.e. there is an error set
        fprintf(stderr, "ERROR: Broker reported an error: %.*s\n",
                t->end - t->start, js + t->start);
        goto err;
      }
      NEXT_TOK;
    } else if (json_strcmp(js, t, "queryParameters") == 0) {
      NEXT_TOK;
      json_type_assert(t, JSMN_OBJECT);
      // skip over this object
      t = json_skip(t);
    } else if (json_strcmp(js, t, "data") == 0) {
      /* expected layout: "data": { "dumpFiles": [ {..}, {..}, ... ] } */
      NEXT_TOK;
      json_type_assert(t, JSMN_OBJECT);
      NEXT_TOK;
      json_str_assert(js, t, "dumpFiles");
      NEXT_TOK;
      json_type_assert(t, JSMN_ARRAY);
      arr_len = t->size; // number of dump files
      NEXT_TOK;          // first elem in array
      for (j = 0; j < arr_len; j++) {
        json_type_assert(t, JSMN_OBJECT);
        obj_len = t->size;
        NEXT_TOK;

        /* reset per-file flags so a field from a previous record cannot
         * satisfy this record's completeness check */
        url_set = 0;
        project_set = 0;
        collector_set = 0;
        type_set = 0;
        initial_time_set = 0;
        duration_set = 0;

        for (k = 0; k < obj_len; k++) {
          if (json_strcmp(js, t, "urlType") == 0) {
            NEXT_TOK;
            if (json_strcmp(js, t, "simple") != 0) {
              // not yet supported?
              fprintf(stderr, "ERROR: Unsupported URL type '%.*s'\n",
                      t->end - t->start, js + t->start);
              goto err;
            }
            NEXT_TOK;
          } else if (json_strcmp(js, t, "url") == 0) {
            NEXT_TOK;
            json_type_assert(t, JSMN_STRING);
            /* grow the reusable url buffer only when this value is longer
             * than anything seen so far */
            if (url_len < (t->end - t->start + 1)) {
              url_len = t->end - t->start + 1;
              if ((url = realloc(url, url_len)) == NULL) {
                fprintf(stderr, "ERROR: Could not realloc URL string\n");
                goto err;
              }
            }
            json_strcpy(url, t, js);
            unescape_url(url);
            url_set = 1;
            NEXT_TOK;
          } else if (json_strcmp(js, t, "project") == 0) {
            NEXT_TOK;
            json_type_assert(t, JSMN_STRING);
            json_strcpy(project, t, js);
            project_set = 1;
            NEXT_TOK;
          } else if (json_strcmp(js, t, "collector") == 0) {
            NEXT_TOK;
            json_type_assert(t, JSMN_STRING);
            json_strcpy(collector, t, js);
            collector_set = 1;
            NEXT_TOK;
          } else if (json_strcmp(js, t, "type") == 0) {
            NEXT_TOK;
            json_type_assert(t, JSMN_STRING);
            json_strcpy(type, t, js);
            type_set = 1;
            NEXT_TOK;
          } else if (json_strcmp(js, t, "initialTime") == 0) {
            NEXT_TOK;
            json_type_assert(t, JSMN_PRIMITIVE);
            json_strtoul(initial_time, t);
            initial_time_set = 1;
            NEXT_TOK;
          } else if (json_strcmp(js, t, "duration") == 0) {
            NEXT_TOK;
            json_type_assert(t, JSMN_PRIMITIVE);
            json_strtoul(duration, t);
            duration_set = 1;
            NEXT_TOK;
          } else {
            fprintf(stderr, "ERROR: Unknown field '%.*s'\n",
                    t->end - t->start, js + t->start);
            goto err;
          }
        }
        // file obj has been completely read
        /* an incomplete record is treated as transient (retry), not fatal */
        if (url_set == 0 || project_set == 0 || collector_set == 0 ||
            type_set == 0 || initial_time_set == 0 || duration_set == 0) {
          fprintf(stderr, "ERROR: Invalid dumpFile record\n");
          goto retry;
        }
#ifdef WITH_BROKER_DEBUG
        fprintf(stderr, "----------\n");
        fprintf(stderr, "URL: %s\n", url);
        fprintf(stderr, "Project: %s\n", project);
        fprintf(stderr, "Collector: %s\n", collector);
        fprintf(stderr, "Type: %s\n", type);
        fprintf(stderr, "InitialTime: %" PRIu32 "\n", initial_time);
        fprintf(stderr, "Duration: %" PRIu32 "\n", duration);
#endif

        // do we need to update our current_window_end?
        if (initial_time + duration > broker_ds->current_window_end) {
          broker_ds->current_window_end = (initial_time + duration);
        }

        /* ownership of the strdup'd strings passes to the input manager */
        if (bgpstream_input_mgr_push_sorted_input(
              input_mgr, strdup(url), strdup(project), strdup(collector),
              strdup(type), initial_time, duration) <= 0) {
          goto err;
        }

        num_results++;
      }
    }
    // TODO: handle unknown tokens
  }

  /* a response without a "time" field is malformed */
  if (time_set == 0) {
    goto err;
  }

  free(url);
  return num_results;

retry:
  free(url);
  return ERR_RETRY;

err:
  fprintf(stderr, "ERROR: Invalid JSON response received from broker\n");
  free(url);
  return ERR_RETRY;
}