/**
 * Resolve a dotted variable name (e.g. "a.b.c") against a stack of JSON
 * object frames, searching from the top of the stack downwards.
 *
 * The first name component is looked up in each non-NULL object frame,
 * newest first; once found, the remaining components are resolved by
 * descending through nested JSON objects.
 *
 * @param hash_stack stack of JsonElement object frames (must be non-empty)
 * @param name       dotted variable name (not necessarily NUL-terminated)
 * @param name_len   length of @c name
 * @return the resolved element (owned by the containing JSON tree),
 *         or NULL if any component cannot be resolved
 */
static JsonElement *LookupVariable(Seq *hash_stack, const char *name, size_t name_len)
{
    assert(SeqLength(hash_stack) > 0);

    const size_t n_components = StringCountTokens(name, name_len, ".");

    /* Phase 1: resolve the first component against the frame stack, top-down. */
    JsonElement *current = NULL;
    {
        StringRef head = StringGetToken(name, name_len, 0, ".");
        char *head_str = xstrndup(head.data, head.len);

        ssize_t idx = SeqLength(hash_stack) - 1;
        while (idx >= 0 && current == NULL)
        {
            JsonElement *frame = SeqAt(hash_stack, idx);
            idx--;

            if (frame == NULL)
            {
                continue;
            }

            const bool is_object =
                (JsonGetElementType(frame) == JSON_ELEMENT_TYPE_CONTAINER &&
                 JsonGetContainerType(frame) == JSON_CONTAINER_TYPE_OBJECT);
            if (is_object)
            {
                /* May be NULL, in which case we keep scanning older frames. */
                current = JsonObjectGet(frame, head_str);
            }
        }
        free(head_str);
    }

    if (current == NULL)
    {
        return NULL;
    }

    /* Phase 2: descend through the remaining dotted components. */
    for (size_t i = 1; i < n_components; i++)
    {
        if (JsonGetElementType(current) != JSON_ELEMENT_TYPE_CONTAINER ||
            JsonGetContainerType(current) != JSON_CONTAINER_TYPE_OBJECT)
        {
            /* Cannot index into a non-object with a further component. */
            return NULL;
        }

        StringRef comp = StringGetToken(name, name_len, i, ".");
        char *comp_str = xstrndup(comp.data, comp.len);
        current = JsonObjectGet(current, comp_str);
        free(comp_str);

        if (current == NULL)
        {
            return NULL;
        }
    }

    assert(current != NULL);
    return current;
}
static bool SetDelimiters(const char *content, size_t content_len, char *delim_start, size_t *delim_start_len, char *delim_end, size_t *delim_end_len) { size_t num_tokens = StringCountTokens(content, content_len, " \t"); if (num_tokens != 2) { Log(LOG_LEVEL_WARNING, "Could not parse delimiter mustache, " "number of tokens is %llu, expected 2 in '%s'", (unsigned long long) num_tokens, content); return false; } StringRef first = StringGetToken(content, content_len, 0, " \t"); if (first.len > MUSTACHE_MAX_DELIM_SIZE) { Log(LOG_LEVEL_WARNING, "New mustache start delimiter exceeds the allowed size of %d in '%s'", MUSTACHE_MAX_DELIM_SIZE, content); return false; } strncpy(delim_start, first.data, first.len); delim_start[first.len] = '\0'; *delim_start_len = first.len; StringRef second = StringGetToken(content, content_len, 1, " \t"); if (second.len > MUSTACHE_MAX_DELIM_SIZE) { Log(LOG_LEVEL_WARNING, "New mustache start delimiter exceeds the allowed size of %d in '%s'", MUSTACHE_MAX_DELIM_SIZE, content); return false; } strncpy(delim_end, second.data, second.len); delim_end[second.len] = '\0'; *delim_end_len = second.len; return true; }
/**
 * Thread entry point: load keys, optionally parse a report file into
 * per-line entries, then run the cf-testd server until it terminates.
 *
 * @param config_arg a CFTestD_Config* (passed as void* for pthread compat);
 *                   ownership stays with the caller, but config->report_data
 *                   is allocated and freed here
 * @return always NULL (thread result unused)
 */
static void *CFTestD_ServeReport(void *config_arg)
{
    CFTestD_Config *config = (CFTestD_Config *) config_arg;

    /* Prefix all Log() output from this thread with the bind address. */
    LoggingPrivContext *prior = LoggingPrivGetContext();
    LoggingPrivContext log_ctx = {
        .log_hook = LogAddPrefix,
        .param = config->address
    };
    LoggingPrivSetContext(&log_ctx);

    char *priv_key_path = NULL;
    char *pub_key_path = NULL;
    if (config->key_file != NULL)
    {
        /* Derive the public key path by substituting "priv" -> "pub" in the
         * private key path (e.g. "localhost.priv" -> "localhost.pub"). */
        priv_key_path = config->key_file;
        pub_key_path = xstrdup(priv_key_path);
        StringReplace(pub_key_path, strlen(pub_key_path) + 1, "priv", "pub");
    }
    /* NULL paths make LoadSecretKeys fall back to its defaults — presumably;
     * confirm against LoadSecretKeys' contract. */
    LoadSecretKeys(priv_key_path, pub_key_path, &(config->priv_key), &(config->pub_key));
    free(pub_key_path);

    char *report_file = config->report_file;
    if (report_file != NULL)
    {
        Log(LOG_LEVEL_NOTICE, "Got file argument: '%s'", report_file);
        if (!FileCanOpen(report_file, "r"))
        {
            Log(LOG_LEVEL_ERR, "Can't open file '%s' for reading", report_file);
            exit(EXIT_FAILURE);
        }

        Writer *contents = FileRead(report_file, SIZE_MAX, NULL);
        if (!contents)
        {
            Log(LOG_LEVEL_ERR, "Error reading report file '%s'", report_file);
            exit(EXIT_FAILURE);
        }
        size_t report_data_len = StringWriterLength(contents);
        config->report_data = StringWriterClose(contents);

        /* The Seq stores pointers INTO report_data (no item destructor);
         * report_data must outlive config->report. */
        Seq *report = SeqNew(64, NULL);
        size_t report_len = 0;

        /* First line is a timestamp header; NUL-terminate it in place by
         * overwriting the trailing newline. */
        StringRef ts_ref = StringGetToken(config->report_data, report_data_len, 0, "\n");
        char *ts = (char *) ts_ref.data;
        *(ts + ts_ref.len) = '\0';
        SeqAppend(report, ts);

        /* Start right after the newline that followed the timestamp header. */
        char *position = ts + ts_ref.len + 1;
        char *report_line;
        size_t report_line_len;
        while (CFTestD_GetReportLine(position, &report_line, &report_line_len))
        {
            /* NUL-terminate each line in place, as with the header above. */
            *(report_line + report_line_len) = '\0';
            SeqAppend(report, report_line);
            report_len += report_line_len;
            position = report_line + report_line_len + 1; /* there's an extra newline after each report_line */
        }
        config->report = report;
        config->report_len = report_len;

        /* NOTE(review): "%d" with config->report_len, which is assigned from a
         * size_t — "%zu" is likely the correct specifier; confirm the field's
         * declared type before changing. */
        Log(LOG_LEVEL_NOTICE, "Read %d bytes for report contents", config->report_len);
        if (config->report_len <= 0)
        {
            Log(LOG_LEVEL_ERR, "Report file contained no bytes");
            exit(EXIT_FAILURE);
        }
    }

    Log(LOG_LEVEL_INFO, "Starting server at %s...", config->address);
    fflush(stdout); // for debugging startup

    config->ret = CFTestD_StartServer(config);

    free(config->report_data);
    /* Not strictly necessary since the process is about to terminate, but it
     * verifies that cleanup actually works without a segfault or similar. */
    ServerTLSDeInitialize(&(config->priv_key), &(config->pub_key), &(config->ssl_ctx));
    LoggingPrivSetContext(prior);
    return NULL;
}

/**
 * Signal handler: on SIGTERM/SIGINT, flush logging and request shutdown by
 * setting the global TERMINATE flag. Other signals are ignored.
 *
 * NOTE(review): fflush/fprintf are not async-signal-safe per POSIX; acceptable
 * for a test daemon, but worth confirming this is intentional.
 */
static void HandleSignal(int signum)
{
    switch (signum)
    {
    case SIGTERM:
    case SIGINT:
        // flush all logging before process ends.
        fflush(stdout);
        fprintf(stderr, "Terminating...\n");
        TERMINATE = true;
        break;
    default:
        break;
    }
}

/**
 * @param ip_str string representation of an IPv4 address (the usual one, with
 *               4 octets separated by dots)
 * @return a new string representing the incremented IP address (HAS TO BE
 *         FREED), or NULL if @c ip_str cannot be parsed
 */
static char *IncrementIPaddress(const char *ip_str)
{
    /* NOTE(review): inet_addr() also returns INADDR_NONE for the valid
     * address "255.255.255.255", which would be rejected here. */
    uint32_t ip = (uint32_t) inet_addr(ip_str);
    if (ip == INADDR_NONE)
    {
        Log(LOG_LEVEL_ERR, "Failed to parse address: '%s'", ip_str);
        return NULL;
    }

    int step = 1;
    char *last_dot = strrchr(ip_str, '.');
    assert(last_dot != NULL); /* the doc comment says there must be dots! */
    if (StringSafeEqual(last_dot + 1, "255"))
    {
        /* avoid the network address (ending with 0) */
        step = 2;
    }
    else if (StringSafeEqual(last_dot + 1, "254"))
    {
        /* avoid the broadcast address and the network address */
        step = 3;
    }

    /* Increment in host byte order, then convert back. */
    uint32_t ip_num = ntohl(ip);
    ip_num += step;
    ip = htonl(ip_num);

    struct in_addr ip_struct;
    ip_struct.s_addr = ip;

    /* inet_ntoa returns a static buffer, hence the defensive xstrdup. */
    return xstrdup(inet_ntoa(ip_struct));
}