/* Parse the top level of an HHC (HTML Help Contents) stream.
 *
 * Walks the tag stream; every <UL> found at this level is parsed
 * recursively via parse_ul() and spliced in as a child of the previous
 * item.  Returns the first item created (or NULL if none), and reports
 * through *insert_type how the caller should attach the result:
 * INSERT_CHILD once at least one <UL> was seen, INSERT_NEXT otherwise. */
static ContentItem *parse_hhc(HHInfo *info, IStream *str, ContentItem *hhc_root,
        insert_type_t *insert_type)
{
    stream_t stream;
    strbuf_t node, node_name;
    ContentItem *ret = NULL, *prev = NULL;

    *insert_type = INSERT_NEXT;

    strbuf_init(&node);
    strbuf_init(&node_name);
    stream_init(&stream, str);

    /* next_node() leaves the raw tag text in 'node'; get_node_name()
     * extracts just the tag name for dispatch. */
    while(next_node(&stream, &node)) {
        get_node_name(&node, &node_name);

        TRACE("%s\n", node.buf);

        if(!strcasecmp(node_name.buf, "ul")) {
            ContentItem *item = parse_ul(info, &stream, hhc_root);
            prev = insert_item(prev, item, INSERT_CHILD);
            if(!ret)
                ret = prev; /* remember the head of the list */
            *insert_type = INSERT_CHILD;
        }

        strbuf_zero(&node); /* reuse the buffer for the next tag */
    }

    strbuf_free(&node);
    strbuf_free(&node_name);
    return ret;
}
/* Skip past the current node and read the next tag (text between '<' and
 * '>') into 'buf'.  Returns FALSE when the stream is exhausted.
 *
 * NOTE: the final strbuf_append uses length 2 deliberately, so that both
 * the '>' character and its terminating '\0' are copied, leaving buf a
 * valid NUL-terminated C string. */
BOOL next_node(stream_t *stream, strbuf_t *buf)
{
    strbuf_t tmpbuf;

    /* search through the end of the current node */
    strbuf_init(&tmpbuf);
    if(!find_node_end(stream, &tmpbuf)) {
        strbuf_free(&tmpbuf);
        return FALSE;
    }
    strbuf_free(&tmpbuf);

    /* find the beginning of the next node */
    if(!stream_chr(stream, NULL, '<'))
        return FALSE;

    /* read out the data of the next node */
    if(!find_node_end(stream, buf))
        return FALSE;
    strbuf_append(buf, ">", 2);

    return TRUE;
}
/* Parse the object tag corresponding to a list item.
 *
 * At this step we look for all of the "param" child tags, using this information
 * to build up the information about the list item. When we reach the </object>
 * tag we know that we've finished parsing this list item.
 *
 * Returns a newly allocated IndexItem; the caller owns it. */
static IndexItem *parse_index_sitemap_object(HHInfo *info, stream_t *stream)
{
    strbuf_t node, node_name;
    IndexItem *item;

    strbuf_init(&node);
    strbuf_init(&node_name);

    item = heap_alloc_zero(sizeof(IndexItem));
    item->nItems = 0;
    /* Zero-size allocation so later realloc/free always have a valid
     * pointer to work with. */
    item->items = heap_alloc_zero(0);
    item->itemFlags = 0x11; /* NOTE(review): magic flag value -- its meaning
                             * is not visible here; confirm against the
                             * index-display code before changing. */

    while(next_node(stream, &node)) {
        get_node_name(&node, &node_name);

        TRACE("%s\n", node.buf);

        if(!strcasecmp(node_name.buf, "param")) {
            parse_index_obj_node_param(item, node.buf, info->pCHMInfo->codePage);
        }else if(!strcasecmp(node_name.buf, "/object")) {
            break; /* end of this list item */
        }else {
            WARN("Unhandled tag! %s\n", node_name.buf);
        }

        strbuf_zero(&node);
    }

    strbuf_free(&node);
    strbuf_free(&node_name);

    return item;
}
/* Read a delimited string token from the reader, including the closing
 * quote, and emit it as 'type'.
 *
 * 'prefix' seeds the buffer (so e.g. a dispatch prefix is preserved in the
 * emitted text).  'escape' tracks whether the previous character was an
 * unconsumed backslash, so that an escaped quote does not terminate the
 * string.
 *
 * NOTE(review): the WEOF case has no break and would fall through into the
 * backslash case on an already-freed buffer if reader_error() returned.
 * This relies on reader_error() never returning (presumably longjmp-based
 * error handling) -- confirm it is effectively noreturn. */
static clj_Result read_typed_string(clj_Type type, const wchar_t *prefix, clj_Reader *r) {
  wint_t c;
  StringBuffer strbuf;
  int escape = 0;
  strbuf_init(&strbuf, 80); // C'mon now, how big is your terminal?
  strbuf_appends(&strbuf, prefix);
  while (1) {
    c = pop_char(r);
    switch (c) {
      case WEOF:
        strbuf_free(&strbuf);
        reader_error(r, CLJ_UNEXPECTED_EOF);
      case L'\\':
        strbuf_append(&strbuf, c);
        escape = !escape; // two backslashes cancel out
        break;
      case L'"':
        strbuf_append(&strbuf, c);
        if (escape) {
          escape = 0; // escaped quote: keep reading
          break;
        } else {
          emit(r, type, strbuf.chars);
          strbuf_free(&strbuf);
          return CLJ_MORE;
        }
      default:
        escape = 0;
        strbuf_append(&strbuf, c);
    }
  }
}
/* Serialise an EdDSA key pair into the OpenSSH private-key blob layout:
 * string(public point) followed by string(private integer LE || public
 * point again). */
static void eddsa_openssh_blob(ssh_key *key, BinarySink *bs)
{
    struct eddsa_key *ek = container_of(key, struct eddsa_key, sshk);
    assert(ek->curve->type == EC_EDWARDS);

    /* Encode the public and private points as strings */
    strbuf *pub_sb = strbuf_new();
    put_epoint(pub_sb, ek->publicKey, ek->curve, false);
    /* +4 skips the 32-bit length prefix the put_* call wrote, leaving a
     * ptrlen over just the payload bytes. */
    ptrlen pub = make_ptrlen(pub_sb->s + 4, pub_sb->len - 4);

    /* _nm variant: buffer for secret data (presumably wiped on free --
     * confirm against strbuf_new_nm's contract). */
    strbuf *priv_sb = strbuf_new_nm();
    put_mp_le_unsigned(priv_sb, ek->privateKey);
    ptrlen priv = make_ptrlen(priv_sb->s + 4, priv_sb->len - 4);

    put_stringpl(bs, pub);

    /* Encode the private key as the concatenation of the
     * little-endian key integer and the public key again */
    put_uint32(bs, priv.len + pub.len);
    put_datapl(bs, priv);
    put_datapl(bs, pub);

    strbuf_free(pub_sb);
    strbuf_free(priv_sb);
}
// Free a cmark_node list and any children. void cmark_free_nodes(cmark_node *e) { cmark_node *next; while (e != NULL) { strbuf_free(&e->string_content); switch (e->type){ case NODE_FENCED_CODE: strbuf_free(&e->as.code.info); break; case NODE_STRING: case NODE_INLINE_HTML: case NODE_INLINE_CODE: cmark_chunk_free(&e->as.literal); break; case NODE_LINK: case NODE_IMAGE: free(e->as.link.url); free(e->as.link.title); break; default: break; } if (e->last_child) { // Splice children into list e->last_child->next = e->next; e->next = e->first_child; } next = e->next; free(e); e = next; } }
/* Parse a text/sitemap <OBJECT> tag describing one content item.
 *
 * Collects every <param> child into a freshly allocated ContentItem until
 * the closing </object>.  If the item turns out to be a merge entry
 * (merge.chm_index set), the referenced CHM stream is opened and parsed
 * recursively as the item's children; if that stream cannot be opened and
 * the item has no name of its own, it is discarded and NULL is returned.
 *
 * *insert_type tells the caller how to attach the result (the recursive
 * parse_hhc call may change it to INSERT_CHILD).
 *
 * Fix: the failure WARN previously printed chm_file for both "%s"
 * operands; the second operand is now the stream name (chm_index). */
static ContentItem *parse_sitemap_object(HHInfo *info, stream_t *stream, ContentItem *hhc_root,
        insert_type_t *insert_type)
{
    strbuf_t node, node_name;
    ContentItem *item;

    *insert_type = INSERT_NEXT;

    strbuf_init(&node);
    strbuf_init(&node_name);

    item = heap_alloc_zero(sizeof(ContentItem));

    while(next_node(stream, &node)) {
        get_node_name(&node, &node_name);

        TRACE("%s\n", node.buf);

        if(!strcasecmp(node_name.buf, "/object"))
            break;
        if(!strcasecmp(node_name.buf, "param"))
            parse_obj_node_param(item, hhc_root, node.buf, info->pCHMInfo->codePage);

        strbuf_zero(&node);
    }

    strbuf_free(&node);
    strbuf_free(&node_name);

    if(item->merge.chm_index) {
        IStream *merge_stream;

        merge_stream = GetChmStream(info->pCHMInfo, item->merge.chm_file, &item->merge);
        if(merge_stream) {
            item->child = parse_hhc(info, merge_stream, hhc_root, insert_type);
            IStream_Release(merge_stream);
        }else {
            WARN("Could not get %s::%s stream\n", debugstr_w(item->merge.chm_file),
                 debugstr_w(item->merge.chm_index));

            if(!item->name) {
                free_content_item(item);
                item = NULL;
            }
        }
    }

    return item;
}
/* Parse the HTML Help page corresponding to all of the Index items.
 *
 * At this high-level stage we locate out each HTML list item tag.
 * Since there is no end-tag for the <LI> item, we must hope that
 * the <LI> entry is parsed correctly or tags might get lost.
 *
 * Within each entry it is also possible to encounter an additional
 * <UL> tag. When this occurs the tag indicates that the topics
 * contained within it are related to the parent <LI> topic and
 * should be inset by an indent.
 */
static void parse_hhindex(HHInfo *info, IStream *str, IndexItem *item)
{
    stream_t stream;
    strbuf_t node, node_name;
    int indent_level = -1; /* first top-level <ul> brings this to 0 */

    strbuf_init(&node);
    strbuf_init(&node_name);

    stream_init(&stream, str);

    while(next_node(&stream, &node)) {
        get_node_name(&node, &node_name);

        TRACE("%s\n", node.buf);

        if(!strcasecmp(node_name.buf, "li")) {
            IndexItem *new_item;

            new_item = parse_li(info, &stream);
            if(new_item && item->keyword && strcmpW(new_item->keyword, item->keyword) == 0) {
                /* Same keyword as the current item: fold the new entry's
                 * first sub-item into it and discard the duplicate shell. */
                int num_items = item->nItems;

                item_realloc(item, num_items+1);
                memcpy(&item->items[num_items], &new_item->items[0], sizeof(IndexSubItem));
                heap_free(new_item->keyword);
                heap_free(new_item->items);
                heap_free(new_item);
            } else if(new_item) {
                /* New keyword: append it to the list, inheriting the
                 * current merge info and the current indent depth. */
                item->next = new_item;
                item->next->merge = item->merge;
                item = item->next;
                item->indentLevel = indent_level;
            }
        }else if(!strcasecmp(node_name.buf, "ul")) {
            indent_level++;
        }else if(!strcasecmp(node_name.buf, "/ul")) {
            indent_level--;
        }else {
            WARN("Unhandled tag! %s\n", node_name.buf);
        }

        strbuf_zero(&node);
    }

    strbuf_free(&node);
    strbuf_free(&node_name);
}
/* Serialise the single Lua argument to a JSON string and push it.
 *
 * When encode_keep_buffer is set, the persistent buffer in the config is
 * reused across calls; otherwise a stack-local buffer is created for this
 * call and released before returning. */
static int json_encode(lua_State *l)
{
    json_config_t *cfg = json_fetch_config(l);
    strbuf_t scratch;
    strbuf_t *buf;
    char *json;
    int len;

    luaL_argcheck(l, lua_gettop(l) == 1, 1, "expected 1 argument");

    if (cfg->encode_keep_buffer) {
        /* Reuse the persistent buffer */
        buf = &cfg->encode_buf;
        strbuf_reset(buf);
    } else {
        /* Per-call private buffer */
        buf = &scratch;
        strbuf_init(buf, 0);
    }

    json_append_data(l, cfg, 0, buf);
    json = strbuf_string(buf, &len);

    lua_pushlstring(l, json, len);

    if (!cfg->encode_keep_buffer)
        strbuf_free(buf);

    return 1;
}
/* Decrypt an SSH-1 RSA ciphertext and strip PKCS#1 v1.5 block type 2
 * padding, appending the payload to 'outbuf'.
 *
 * Returns false when the padding is malformed: leading bytes are not
 * 00 02, or no 00 separator appears before the block runs out.  'data'
 * is a secret-safe strbuf holding the full decrypted block, MSB first. */
bool rsa_ssh1_decrypt_pkcs1(mp_int *input, RSAKey *key, strbuf *outbuf)
{
    strbuf *data = strbuf_new_nm();
    bool success = false;
    BinarySource src[1];

    {
        mp_int *b = rsa_ssh1_decrypt(input, key);
        /* Serialise the decrypted integer big-endian, padded to the full
         * modulus width so leading zero bytes are preserved. */
        for (size_t i = (mp_get_nbits(key->modulus) + 7) / 8; i-- > 0 ;) {
            put_byte(data, mp_get_byte(b, i));
        }
        mp_free(b);
    }

    BinarySource_BARE_INIT(src, data->u, data->len);

    /* Check PKCS#1 formatting prefix */
    if (get_byte(src) != 0)
        goto out;
    if (get_byte(src) != 2)
        goto out;
    /* Skip the nonzero random padding bytes; the 00 byte terminates them.
     * NOTE(review): the PKCS#1 minimum of 8 padding bytes is not enforced
     * here -- confirm callers do not depend on that check. */
    while (1) {
        unsigned char byte = get_byte(src);
        if (get_err(src))
            goto out; /* ran off the end without finding the separator */
        if (byte == 0)
            break;
    }

    /* Everything else is the payload */

    success = true;
    put_data(outbuf, get_ptr(src), get_avail(src));

  out:
    strbuf_free(data);
    return success;
}
/*
** Put up dialog box to show progress and do the conversion.
** Does not return until dialog box is closed.
*/
void k2gui_cbox_do_conversion(K2GUI *k2gui0)

    {
    int status;
    STRBUF *cmdline,_cmdline;

    cmdline=&_cmdline;
    strbuf_init(cmdline);
    /* Publish to the module-level globals used by the dialog callbacks */
    k2gui = k2gui0;
    k2gui_cbox=&_k2gui_cbox;
    k2gui_cbox_init();
    /* Launch conversion dialog box and start the conversion thread */
    status=k2gui_cbox_create_dialog_window(k2gui->k2conv,k2gui->env,cmdline,/* k2gui->cmdline, */
                                           &k2gui->mainwin,willusgui_instance());
    if (!status)
        {
        /* Disable parent so that convert dialog is modal. */
        willusgui_control_enable(&k2gui->mainwin,0);
        /* Process messages from conversion dialog box */
        k2gui_cbox_wait_for_conversion_dialog_box_messages();
        /* Re-enable the parent once the dialog is dismissed */
        willusgui_control_enable(&k2gui->mainwin,1);
        k2gui_cbox_destroy();
        /* Without this, the main window seems to lose focus */
        willusgui_window_set_focus(&k2gui->mainwin);
        }
    strbuf_free(cmdline);
    }
/* Read the entire contents of the global input stream _f, dropping newline
 * characters, and return it as a freshly strdup()ed string (caller frees).
 *
 * NOTE(review): the 'path' argument is only asserted non-NULL and never
 * opened -- the data actually comes from the file-scope stream _f.
 * Confirm that is intentional before relying on 'path'.
 *
 * Bug fix: fgetc() returns an int; storing it in a plain char before the
 * EOF comparison breaks end-of-file detection (infinite loop where char
 * is unsigned, premature stop on byte 0xFF where it is signed). */
char* read_file(char *path)
{
    HEX_ASSERT(path);

    Strbuf strbuf = strbuf_create();
    HEX_ASSERT(strbuf);

    int c; /* must be int so EOF is distinguishable from valid bytes */

    while( (c = fgetc(_f) ) != EOF ) {
        if(c == '\n')
            continue;
        char s[2];
        memset(s, 0, sizeof(s));
        snprintf(s, sizeof(s), "%c", c);
        strbuf_append(strbuf, s);
    }

    char *str = strdup(strbuf_cstr(strbuf));
    HEX_ASSERT(str);

    strbuf_free(&strbuf);

    return str;
}
/* uv_work completion callback for a finished curl transfer.
 *
 * Pushes the response body (or nil on failure) and the transfer stats onto
 * the Lua stack, fires the "curl_done" and "curl_cancel" global callbacks,
 * then releases the easy handle and per-request resources. */
static void curl_thread_done(uv_work_t *w, int _) {
	luv_curl_t *lc = (luv_curl_t *)w->data;
	lua_State *L = lc->L;

	free(w); /* work request no longer needed */

	if (lc->stat == DOWNLOADING)
		lc->stat = DONE;

	// 1
	if (lc->curl_ret == 0 && lc->retsb) {
		/* NUL-terminate the accumulated body so it can be pushed as a
		 * C string. */
		strbuf_append_char(lc->retsb, 0);
		lua_pushstring(L, lc->retsb->buf);
	} else
		lua_pushnil(L);

	if (lc->retfp)
		fclose(lc->retfp);

	// 2
	getinfo(lc);
	push_curl_stat(lc);

	/* "curl_done" consumes the two pushed values (body, stats). */
	lua_do_global_callback(lc->L, "curl_done", lc->c, 2, 1);
	lua_do_global_callback(lc->L, "curl_cancel", lc->c, 0, 0);

	curl_easy_cleanup(lc->c);
	if (lc->retsb)
		strbuf_free(lc->retsb);
	if (lc->retfname)
		free(lc->retfname);
}
/* Serialise the single Lua argument to JSON and push the resulting string.
 *
 * The encode buffer lives in the config userdata: it is created lazily on
 * first use, reset on every subsequent call, and torn down again after the
 * call unless encode_keep_buffer asks for it to persist. */
static int json_encode(lua_State *l)
{
    json_config_t *cfg;
    char *json;
    int len;

    /* Exactly one argument is required; json_verify_arg_count() cannot
     * enforce an upper bound, so check explicitly. */
    luaL_argcheck(l, lua_gettop(l) == 1, 1, "expected 1 argument");

    cfg = json_fetch_config(l);
    cfg->current_depth = 0;

    /* Lazily create the persistent buffer; reset it when reusing. */
    if (!strbuf_allocated(&cfg->encode_buf))
        strbuf_init(&cfg->encode_buf, 0);
    else
        strbuf_reset(&cfg->encode_buf);

    json_append_data(l, cfg, &cfg->encode_buf);
    json = strbuf_string(&cfg->encode_buf, &len);

    lua_pushlstring(l, json, len);

    if (!cfg->encode_keep_buffer)
        strbuf_free(&cfg->encode_buf);

    return 1;
}
/* Release the global test fixtures: destroy the shared string buffer
 * (strbuf_free NULLs the pointer it is given), close the input stream,
 * and clear the parse-tree root. */
void teardown()
{
    strbuf_free(&_strbuf);
    HEX_ASSERT(_strbuf == NULL);

    fclose(_f);

    _root = NULL;
}
/* Search for pattern p (length l) in the BWT text and print every matching
 * record to stdout, de-duplicated and sorted by record position.
 *
 * post_content selects the record shape: nonzero means the delimiter
 * follows the content (e.g. "xxx\n"), zero means it precedes it (e.g.
 * "[xxx").
 *
 * Fix: the record-position variable was declared as 'p', shadowing the
 * pattern parameter of the same name; it is renamed to 'pos'. */
void search(bwttext * t, unsigned char * p, unsigned int l, unsigned char delimiter, int post_content) {
    /* searching forward or backward here doesn't matter */
    fpos_range * r = search_backward(t, p, l);

    if (r != NULL) {
        unsigned long i, pos;
        plset * ps = plset_init();

        for (i = r->first; i <= r->last; i++) {
            strbuf * sb1 = strbuf_init();
            pos = decode_forward_until(t, i, delimiter, post_content, sb1);
            if (!post_content) {
                /* e.g. [xxx [yyy -- we could decode backward to find the
                 * ['s fpos, but ['s order in the first column differs from
                 * the last column (the special char's first-column order is
                 * assigned manually in positional BWT, so its occ in BWT !=
                 * its occ in the first column).  Instead find [ in "xxx["
                 * first, then step to the previous one. */
                if (pos == 0)
                    pos = t->char_table[1].ss - 1; /* fpos of last [ */
                else
                    pos--; /* fpos of the previous [ */
            }
            /* otherwise (e.g. xxx\n yyy\n) decoding forward already gave
             * the \n's fpos directly. */

            if (plset_contains(ps, pos)) {
                strbuf_free(sb1); /* duplicate record: discard */
            } else {
                strbuf * sb2 = strbuf_init();
                decode_backward_until(t, i, delimiter, !post_content, sb2);
                /* ps takes ownership of sb1 and sb2 */
                plset_put(ps, pos, sb2, sb1);
            }
        }

        plset_sort(ps);
        plset_print(ps, stdout);
        plset_free(ps);
    } else {
        fprintf(stderr, "no results found\n");
    }

    free(r);
}
/* Raise a Lua error for a value that cannot be serialised.
 *
 * The per-call encode buffer (used when encode_keep_buffer is off) must be
 * released here because luaL_error() longjmps out and never returns, so no
 * later cleanup would run. */
static void json_encode_exception(lua_State *l, json_config_t *cfg, strbuf_t *json, int lindex,
                                  const char *reason)
{
    if (!cfg->encode_keep_buffer)
        strbuf_free(json);
    luaL_error(l, "Cannot serialise %s: %s",
               lua_typename(l, lua_type(l, lindex)), reason);
}
/* Build the Bugzilla comment text for a problem, driven by the section
 * list in comment_fmt_spec.
 *
 * Sections whose name starts with '%' (e.g. "%attach") are meta-sections
 * and skipped.  A section with items renders as "Name:\n<items>" (bare
 * items when the name is empty) only if at least one item produced text;
 * a section without items emits its name as a literal line, with
 * consecutive empty lines collapsed.  Trailing blank lines are trimmed
 * from the result at the end. */
static void generate_bz_comment(struct strbuf *result, problem_data_t *pd, GList *comment_fmt_spec)
{
    bool last_line_is_empty = true;
    GList *l = comment_fmt_spec;
    while (l)
    {
        section_t *sec = l->data;
        l = l->next;

        /* Skip special sections such as "%attach" */
        if (sec->name[0] == '%')
            continue;

        if (sec->items)
        {
            /* "Text: item[,item]..." */
            struct strbuf *output = strbuf_new();
            GList *item = sec->items;
            while (item)
            {
                const char *str = item->data;
                item = item->next;
                if (str[0] == '-') /* "-name", ignore it */
                    continue;
                append_item(output, str, pd, comment_fmt_spec);
            }
            /* Only emit the section if an item actually produced text */
            if (output->len != 0)
            {
                strbuf_append_strf(result,
                        sec->name[0] ? "%s:\n%s" : "%s%s",
                        sec->name,
                        output->buf
                );
                last_line_is_empty = false;
            }
            strbuf_free(output);
        }
        else
        {
            /* Just "Text" (can be "") */
            /* Filter out consecutive empty lines */
            if (sec->name[0] != '\0' || !last_line_is_empty)
                strbuf_append_strf(result, "%s\n", sec->name);
            last_line_is_empty = (sec->name[0] == '\0');
        }
    }

    /* Nuke any trailing empty lines */
    while (result->len >= 1
            && result->buf[result->len-1] == '\n'
            && (result->len == 1 || result->buf[result->len-2] == '\n')
    )
    {
        result->buf[--result->len] = '\0';
    }
}
/* Render the AST rooted at 'root' into a newly allocated string.
 * Ownership of the heap string passes to the caller via strbuf_detach;
 * freeing the strbuf afterwards releases only its bookkeeping. */
char *cmark_render_ast(cmark_node *root)
{
	strbuf buffer = GH_BUF_INIT;
	char *result;

	render_nodes(&buffer, root, -2);
	result = (char *)strbuf_detach(&buffer);
	strbuf_free(&buffer);

	return result;
}
/* Destroy a run_event_state and everything it owns.  NULL is a no-op. */
void free_run_event_state(struct run_event_state *state)
{
    if (!state)
        return;

    strbuf_free(state->command_output);
    free_commands(state);
    free(state);
}
/* Parse one <UL> level of the contents tree.
 *
 * Each text/sitemap <OBJECT> becomes a ContentItem attached after (or as
 * a child of) the previous one, as directed by parse_sitemap_object via
 * 'it'.  A nested <UL> is parsed recursively and attached as a child of
 * the preceding item.  Returns the first item of this level, or NULL. */
static ContentItem *parse_ul(HHInfo *info, stream_t *stream, ContentItem *hhc_root)
{
    strbuf_t node, node_name;
    ContentItem *ret = NULL, *prev = NULL, *new_item = NULL;
    insert_type_t it;

    strbuf_init(&node);
    strbuf_init(&node_name);

    while(next_node(stream, &node)) {
        get_node_name(&node, &node_name);

        TRACE("%s\n", node.buf);

        if(!strcasecmp(node_name.buf, "object")) {
            const char *ptr;
            int len;

            static const char sz_text_sitemap[] = "text/sitemap";

            /* Only type="text/sitemap" objects describe contents entries */
            ptr = get_attr(node.buf, "type", &len);

            if(ptr && len == sizeof(sz_text_sitemap)-1
               && !memcmp(ptr, sz_text_sitemap, len)) {
                new_item = parse_sitemap_object(info, stream, hhc_root, &it);
                prev = insert_item(prev, new_item, it);
                if(!ret)
                    ret = prev;
            }
        }else if(!strcasecmp(node_name.buf, "ul")) {
            /* Nested list becomes a child of the current item; 'prev'
             * itself deliberately stays at this level. */
            new_item = parse_ul(info, stream, hhc_root);
            insert_item(prev, new_item, INSERT_CHILD);
        }else if(!strcasecmp(node_name.buf, "/ul")) {
            break; /* end of this level */
        }

        strbuf_zero(&node);
    }

    strbuf_free(&node);
    strbuf_free(&node_name);

    return ret;
}
/* Decode HTML character entities ("&amp;" etc.) in 'html', returning a
 * newly allocated plain-text string (caller frees).
 *
 * Characters between '&' and ';' are buffered in 'ent' and looked up in
 * the entity table once the terminator arrives.  Unknown entities -- and
 * an entity left unterminated at end of input -- are silently dropped
 * from the output rather than echoed. */
char * htmlent_decode(const char *html)
{
	bool in_ent = false;
	struct strbuf *buf = strbuf_init();
	struct strbuf *ent = strbuf_init();
	char *res;
	const char *dec;

	while (*html) {
		char c = *html++;

		if (in_ent) {
			strbuf_addc(ent, c);

			if (c == ';') {
				/* strbuf_cstr hands out a heap copy (it is
				 * amz_free'd below), so the entity buffer can be
				 * cleared before the lookup. */
				char *ent_cstr = strbuf_cstr(ent);

				strbuf_clear(ent);
				in_ent = false;

				dec = htmlent_table_lookup_dec(ent_cstr);
				amz_free(ent_cstr);
				if (dec != NULL)
					strbuf_add(buf, dec);
			}

			continue;
		} else if (c == '&') {
			in_ent = true;
			strbuf_addc(ent, c);
			continue;
		}

		strbuf_addc(buf, c);
	}

	res = strbuf_cstr(buf);
	strbuf_free(buf);
	strbuf_free(ent);

	return res;
}
/* Record one additional level of nesting during encode and raise a Lua
 * error if the configured maximum depth is exceeded.  The per-call encode
 * buffer must be freed before raising, since luaL_error() longjmps and
 * never returns. */
static void json_encode_descend(lua_State *l, json_config_t *cfg)
{
    if (++cfg->current_depth <= cfg->encode_max_depth)
        return;

    if (!cfg->encode_keep_buffer)
        strbuf_free(&cfg->encode_buf);
    luaL_error(l, "Cannot serialise, excessive nesting (%d)",
               cfg->current_depth);
}
/* Finalize the document being parsed and return its root node.  Frees the
 * parser's current-line buffer; ownership of the returned tree stays with
 * the parser struct (it is returned from parser->root, not detached). */
cmark_node *cmark_finish(cmark_doc_parser *parser)
{
	finalize_document(parser);
	strbuf_free(parser->curline);
#if CMARK_DEBUG_NODES
	/* Debug-only consistency check of the finished node tree. */
	if (cmark_node_check(parser->root)) {
		abort();
	}
#endif
	return parser->root;
}
/* __gc handler for the serializer userdata.
 *
 * NOTE(review): this frees the file-scope 'encode_buf' (no 'cfg->'
 * prefix), unlike the json_config_t variant elsewhere in this file that
 * frees cfg->encode_buf.  The 'if (cfg)' guard therefore only checks
 * that the userdata is valid, not ownership of the buffer -- confirm the
 * global encode_buf is intentional in this variant. */
static int json_destroy_config(lua_State *l)
{
    struct luaL_serializer *cfg;

    cfg = lua_touserdata(l, 1);
    if (cfg)
        strbuf_free(&encode_buf);
    cfg = NULL;

    return 0;
}
/* __gc metamethod: release the persistent encode buffer owned by the
 * configuration userdata, if the userdata is valid. */
static int json_destroy_config(lua_State *l)
{
    json_config_t *cfg = lua_touserdata(l, 1);

    if (cfg)
        strbuf_free(&cfg->encode_buf);

    return 0;
}
/* Decode and print the records numbered start_pos..end_pos to 'fout'.
 *
 * post_content selects the record shape: nonzero means the delimiter
 * follows the content ("xx\n"), zero means it precedes it ("[xx").
 * Out-of-range bounds are clamped to the valid record range; if the text
 * has no usable records the function returns without output. */
void decode_range(bwttext * t, unsigned long start_pos, unsigned long end_pos, unsigned char delimiter, int post_content, FILE * fout) {
    strbuf * sb;
    unsigned long i, l;

    // boundary check
    if (t->char_num > 1) {
        // position of the last record
        l = t->char_table[1].ss;
        // adjust end of the query
        if (end_pos > l) end_pos = l;
        // adjust beginning of the query
        if (start_pos < 1) start_pos = 1;
    } else if (t->char_num == 1 && t->char_table[0].c != delimiter
            && start_pos < 2 && end_pos > 0) {
        // extreme case: only one char without delimiter
        l = 1;
        start_pos = 1;
        end_pos = 1;
    } else return;

    for (i = start_pos; i <= end_pos; i++) {
        sb = strbuf_init();
        if (post_content) {
            // - delimiter follows content, e.g. xx\n
            // only include the delimiter that comes after the content
            decode_backward_until(t, i - 1, delimiter, 0, sb);
            strbuf_dump_rev(sb, fout);
            // stream the forward half straight to fout instead of buffering
            sb->direct_out = fout;
            decode_forward_until(t, i - 1, delimiter, 1, sb);
        } else {
            // - delimiter is followed by content, e.g. [xx
            // without outputing the delimiter after the content
            if (i == l) // before the first (0) is the last
                decode_backward_until(t, 0, delimiter, 1, sb);
            else // before the next (i) is the current (i-1)
                decode_backward_until(t, i, delimiter, 1, sb);
            strbuf_dump_rev(sb, fout);
        }
        strbuf_free(sb);
    }
}
// Free a node_block list and any children. void cmark_free_blocks(cmark_node_block *e) { cmark_node_block * next; while (e != NULL) { cmark_free_inlines(e->inline_content); strbuf_free(&e->string_content); if (e->tag == CMARK_BLOCK_FENCED_CODE) { strbuf_free(&e->as.code.info); } else if (e->tag == CMARK_BLOCK_DOCUMENT) { reference_map_free(e->as.document.refmap); } if (e->last_child) { // Splice children into list e->last_child->next = e->next; e->next = e->children; } next = e->next; free(e); e = next; } }
/* Tear down a config object: unmap the backing file (if one was mapped),
 * release the parser's scratch buffer and the error string, then the
 * struct itself.  NULL is a no-op. */
void config_close(struct config *config)
{
	if (!config)
		return;

	if (config->mapped.addr)
		munmap(config->mapped.addr, config->mapped.sz);

	strbuf_free(&config->parser.strbuf);
	free(config->error_message);
	free(config);
}
/* Parse the HTML list item node corresponding to a specific help entry.
 *
 * At this stage we look for the only child tag we expect to find under
 * the list item: the <OBJECT> tag. We also only expect to find object
 * tags with the "type" attribute set to "text/sitemap".
 *
 * Returns the parsed IndexItem, or NULL if no sitemap object was found. */
static IndexItem *parse_li(HHInfo *info, stream_t *stream)
{
    strbuf_t node, node_name;
    IndexItem *ret = NULL;

    strbuf_init(&node);
    strbuf_init(&node_name);

    while(next_node(stream, &node)) {
        get_node_name(&node, &node_name);

        TRACE("%s\n", node.buf);

        if(!strcasecmp(node_name.buf, "object")) {
            const char *ptr;
            int len;

            static const char sz_text_sitemap[] = "text/sitemap";

            ptr = get_attr(node.buf, "type", &len);

            if(ptr && len == sizeof(sz_text_sitemap)-1
               && !memcmp(ptr, sz_text_sitemap, len)) {
                /* Found the sitemap object: delegate and stop scanning. */
                ret = parse_index_sitemap_object(info, stream);
                break;
            }
        }else {
            WARN("Unhandled tag! %s\n", node_name.buf);
        }

        strbuf_zero(&node);
    }
    if(!ret)
        FIXME("Failed to parse <li> tag!\n");

    strbuf_free(&node);
    strbuf_free(&node_name);

    return ret;
}