/* Recursively tear down a trie node: destroy every child branch first,
 * then release the node's own Judy arrays, then the node itself. */
static void destroy_node(node_t *node) {
    Word_t idx = 0;
    Word_t *pchild;
    Word_t freed_bytes;

    /* Walk the branch array in ascending index order, recursing into
     * each child subtree. */
    JLF(pchild, node->PJLarray_branches, idx);
    for (; pchild != NULL; JLN(pchild, node->PJLarray_branches, idx)) {
        destroy_node((node_t *)*pchild);
    }

    /* Free both Judy arrays (byte counts are not used). */
    JLFA(freed_bytes, node->PJLarray_branches);
    JLFA(freed_bytes, node->PJLarray_prefixes);

    switch_free(node);
}
/* Tear down a network manager: close all listening sockets, free every
 * Judy map together with the inv objects the inv lists own, shut down
 * peer discovery, and finally free the manager struct itself.
 * Always returns 0.
 *
 * BUG FIX: the original passed a plain `int rc` to JLFA for
 * poll_socket_by_peer; Judy's JLFA yields a Word_t byte count, so an
 * int silently truncates on LP64 platforms.  The existing Word_t `wrc`
 * is used for every Judy free instead. */
int network_manager_destroy(struct network_manager* nm) {
    if(nm->listening_sockets != NULL) {
        size_t num_interfaces = chaind_config(nm->chaind)->network.num_interfaces;
        for(size_t i = 0; i < num_interfaces; i++) {
            close(nm->listening_sockets[i]);
        }
        free(nm->listening_sockets);
    }

    Word_t wrc;
    JHSFA(wrc, nm->claimed_invs);
    JHSFA(wrc, nm->wanted_invs_by_inv);

    /* The inv lists own their elements: free each inv before freeing
     * the containing Judy array. */
    struct inv** pinv = NULL;
    Word_t index = 0;
    JLF(pinv, nm->block_inv_list, index);
    while(pinv != NULL) {
        free(*pinv);
        JLN(pinv, nm->block_inv_list, index);
    }
    JLFA(wrc, nm->block_inv_list);

    index = 0;
    JLF(pinv, nm->tx_inv_list, index);
    while(pinv != NULL) {
        free(*pinv);
        JLN(pinv, nm->tx_inv_list, index);
    }
    JLFA(wrc, nm->tx_inv_list);

    if(nm->peer_discovery != NULL) {
        peer_discovery_done(nm->peer_discovery);
        nm->peer_discovery = NULL;
    }

    JLFA(wrc, nm->poll_socket_by_peer);
    vector_free(&nm->poll_fds);

    // TODO free nm->peer_list
    // TODO free nm->peer_by_address

    free(nm);
    return 0;
}
/* Free a dogma context and everything it owns: fleet membership (if
 * any), the character and gang environments, overridden skill levels,
 * and the drone bookkeeping map. */
int dogma_free_context(dogma_context_t* ctx) {
    dogma_drone_context_t** drone;
    dogma_key_t key = 0;
    int jrc;

    if(ctx->fleet != NULL) {
        bool found;
        DOGMA_ASSUME_OK(dogma_remove_fleet_member(ctx->fleet, ctx, &found));
        assert(found == true && ctx->fleet == NULL);
    }

    dogma_free_env(ctx, ctx->character);
    dogma_free_env(ctx, ctx->gang);
    dogma_reset_skill_levels(ctx);

    /* The drone environments were freed when the character env was
     * freed; only the per-drone bookkeeping structs remain here. */
    for(JLF(drone, ctx->drone_map, key); drone != NULL; JLN(drone, ctx->drone_map, key)) {
        free(*drone);
    }
    JLFA(jrc, ctx->drone_map);

    free(ctx);
    return DOGMA_OK;
}
int main() { Pvoid_t PJArray = (Pvoid_t) NULL; PPosition_type pos = new_position(); PState_DFS state = new_state(); load_pjarray (&PJArray); for (int f = 0; f < LEN * LEN; ++f) { if (pos->taken[f] == N) { put (pos, state, f); if (_winning (pos, state, &PJArray)) { show (pos); } unput (pos, f); } } save_pjarray (PJArray); Word_t Rc_word; JLFA (Rc_word, PJArray); printf ( "put %lu ins %lu hit %lu Judy-bytes %lu\n" , _cnt_put, _cnt_ins, _cnt_hit, Rc_word ); release_position (pos); release_state (state); }
/* Release a dbus service wrapper: drop the watch table, unref the
 * connection when one exists, and free the struct. */
void dbus_destroy_service(struct dbus* dbus) {
    Word_t freed;

    JLFA(freed, dbus->watches);

    if(dbus->conn != NULL) {
        dbus_connection_unref(dbus->conn);
    }

    free(dbus);
}
/* Serialize a Python object into a freshly created pointless buffer and
 * return it wrapped in a PyPointlessPrimVector.  Keyword args:
 * object (required), unwiden_strings, normalize_bitvector (bools).
 * Returns NULL with a Python exception set on failure.
 *
 * BUG FIX: `create_end` was never set to 1 after
 * pointless_create_begin_64(), so the cleanup path could never call
 * pointless_create_end() -- an export error (or output failure) leaked
 * the entire create-context. */
PyObject* pointless_write_object_to_buffer(PyObject* self, PyObject* args, PyObject* kwds) {
    PyObject* object = 0;
    PyObject* retval = 0;
    PyObject* normalize_bitvector = Py_True;
    PyObject* unwiden_strings = Py_False;
    int create_end = 0;       /* does cleanup still need pointless_create_end? */
    void* buf = 0;
    size_t buflen = 0;
    const char* error = 0;

    pointless_export_state_t state;
    state.objects_used = 0;
    state.is_error = 0;
    state.error_line = -1;
    state.unwiden_strings = 0;
    state.normalize_bitvector = 1;

    static char* kwargs[] = {"object", "unwiden_strings", "normalize_bitvector", 0};

    if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|O!O!:serialize", kwargs, &object, &PyBool_Type, &unwiden_strings, &PyBool_Type, &normalize_bitvector))
        return 0;

    state.unwiden_strings = (unwiden_strings == Py_True);
    state.normalize_bitvector = (normalize_bitvector == Py_True);

    pointless_create_begin_64(&state.c);
    /* the create-context is live from here on; make sure cleanup ends it
       if we bail out before the output-and-end call below */
    create_end = 1;

    pointless_export_py(&state, object);

    if (state.is_error)
        goto cleanup;

    /* pointless_create_output_and_end_b ends the context itself,
       success or failure, so cleanup must not end it again */
    create_end = 0;

    if (!pointless_create_output_and_end_b(&state.c, &buf, &buflen, &error)) {
        PyErr_Format(PyExc_IOError, "pointless_create_output: %s", error);
        goto cleanup;
    }

    retval = (PyObject*)PyPointlessPrimVector_from_buffer(buf, buflen);

cleanup:
    if (create_end)
        pointless_create_end(&state.c);

    /* drop the objects-seen tracking map */
    Word_t n_bytes_freed = 0;
    JLFA(n_bytes_freed, state.objects_used);

    return retval;
}
void pq_overload_destroy(pq_context *ctx, pq_overload *overload) { Word_t bytes, index, *pvalue; Pvoid_t table; index = 0; JLF(pvalue, overload->variadic_function_table, index); while(pvalue != NULL) { table = (Pvoid_t)*pvalue; JLFA(bytes, table); JLN(pvalue, overload->variadic_function_table, index); } JLFA(bytes, overload->variadic_function_table); index = 0; JLF(pvalue, overload->function_table, index); while(pvalue != NULL) { table = (Pvoid_t)*pvalue; JLFA(bytes, table); JLN(pvalue, overload->function_table, index); } JLFA(bytes, overload->function_table); }
/* Destroy a scope: drop its symbol table, then release every value the
 * scope created.  Values are released newest-first because newer
 * values may reference older ones -- particularly builtin Types. */
void pq_scope_destroy(pq_context *ctx, pq_scope *scope) {
    Word_t judy_bytes;
    JLFA(judy_bytes, scope->table);

    for(int i = scope->created_values.size - 1; i >= 0; i--) {
        pq_value *v = *pq_vector_at(&scope->created_values, i, pq_value *);
        pq_release_value(ctx, v);
    }

    pq_vector_destroy(&scope->created_values);
}
/* Free every per-entry Judy1 bitset stored in the file-scope JudyL
 * `judy`, then free `judy` itself.  Iteration uses the file-scope
 * cursor globals `pval` and `idx`.
 *
 * `len_sa` is unused; kept only to preserve the call signature.
 * Cleanups vs. the original: removed the unused local `i` and replaced
 * the `while(1) { if (...) break; }` idiom with a direct condition. */
void free_p_entries(my_int len_sa) {
    Word_t Bytes;

    (void)len_sa;

    idx = 0;
    JLF(pval, judy, idx);
    while (pval != NULL) {
        /* each slot holds a Judy1 set; free it before advancing */
        J1FA(Bytes, *pval);
        JLN(pval, judy, idx);
    }
    JLFA(Bytes, judy);
}
/* Drop every overridden skill level for this context; the whole
 * skill_levels Judy array is released in one call. */
int dogma_reset_skill_levels(dogma_context_t* ctx) {
    int freed;
    JLFA(freed, ctx->skill_levels);
    return DOGMA_OK;
}
int main(int argc, char *argv[]) { if (argc != 8) { fprintf(stderr, "Usage: ./merge_N [out_dir_name 1] [item_#_file 2] [length_#_file 3] [count_dist_file 4] [number_of_temp_prefix 5] [from_temp_n] [to_temp_n]\n"); // /root/cx_src/src/merge_N /tmp/data /tmp/result/ino.txt /tmp/result/lno.txt /tmp/result/cdo.txt 3 0 1 return -1; } if ((itf = fopen(argv[2], "a")) == NULL) { fprintf(stderr, "Failed to open file \"%s\" for writing item numbers\n", argv[2]); return -1; } // fixing each temp file fprintf(stdout, "Start fixing temp files\n"); for (i = atoi(argv[6]); i <= atoi(argv[7]); ++i) { //for (i = 0; i < TEMP_N; ++i) { // fix the temp file i int temp_prefix_num = 0;// num of concurrent threads while (temp_prefix_num < atoi(argv[5])){ sprintf(buffer, "%s/%s%d-%d.txt", argv[1], TEMP_PREFIX, i, temp_prefix_num); fprintf(stdout, "\rWorking on temp file: \"%s\" \n", buffer); //fflush(stdout); if ((tsf[i] = fopen(buffer, "r")) == NULL) { fprintf(stderr, "Failed to open file \"%s\" for reading temp strings\n", buffer); //continue; //break; return -1; } while (fscanf(tsf[i], "%"PRId64"\t", &itemn) != EOF) { fgets(Index, BUFFER_SIZE, tsf[i]); for (Len = strlen(Index) - 1; Index[Len] == '\n' || Index[Len] == '\r'; Len--) Index[Len] = 0; ++Len; JSLI(PValNgramS, PJSLNgram, (uint8_t *)Index); if (PValNgramS == PJERR) { fprintf(stderr, "Malloc failed for \"PJSLNgram\"\n"); //return -1; } (*PValNgramS) += itemn; JLI(PValTotC, PJLTotCount, Len); if (PValTotC == PJERR) { fprintf(stderr, "Malloc failed for \"PJLTotCount\"\n"); //return -1; } *PValTotC += itemn; if (*PValNgramS == itemn) { JLI(PValNgramC, PJLNgramCount, Len); if (PValNgramC == PJERR) { fprintf(stderr, "Malloc failed for \"PJLNgramCount\"\n"); //return -1; } ++*PValNgramC; } } sprintf(buffer, "rm %s/%s%d-%d.txt", argv[1], TEMP_PREFIX, i, temp_prefix_num++); if (system(buffer) == -1) { fprintf(stderr, "Failed to execute command: \"%s\"\n", buffer); //return -1; } fclose(tsf[i]); } // write the final temp file 
sprintf(buffer, "%s/%s%d.txt", argv[1], TEMP_PREFIX, i); if ((tsf[i] = fopen(buffer, "w")) == NULL) { fprintf(stderr, "Failed to open file \"%s\" for writing temp strings\n", buffer); return -1; } Index[0] = '\0'; JSLF(PValNgramS, PJSLNgram, (uint8_t *)Index); while (PValNgramS != NULL) { fprintf(tsf[i], "%lu\t%s\n", *PValNgramS, Index); Count = *PValNgramS; JLI(PValCountC, PJLCountCount, Count); if (PValCountC == PJERR) { fprintf(stderr, "Malloc failed for \"PJLCountCount\"\n"); return -1; } ++*PValCountC; JSLN(PValNgramS, PJSLNgram, (uint8_t *)Index); } JSLFA(Bytes, PJSLNgram); fflush(tsf[i]); fclose(tsf[i]); fprintf(itf, "Temp file \"%s/%s%d\" uses %lu Bytes of memory\n", argv[1], TEMP_PREFIX, i, Bytes); fflush(itf); } fclose(itf); if ((lef = fopen(argv[3], "a")) == NULL) { fprintf(stderr, "Failed to open file \"%s\" for writing length number\n", argv[3]); return -1; } Total = NgramN = 0; JLF(PValTotC, PJLTotCount, Total); JLF(PValNgramC, PJLNgramCount, NgramN); while (PValTotC != NULL) { fprintf(lef, "%lu\t%lu\t%lu\n", Total, *PValNgramC, *PValTotC); JLN(PValTotC, PJLTotCount, Total); JLN(PValNgramC, PJLNgramCount, NgramN); } JLFA(Bytes, PJLTotCount); JLFA(Bytes, PJLNgramCount); fflush(lef); fclose(lef); if ((cdf = fopen(argv[4], "a")) == NULL) { fprintf(stderr, "Failed to open file \"%s\" for writing count distribuction\n", argv[4]); return -1; } Count = 0; JLF(PValCountC, PJLCountCount, Count); while (PValCountC != NULL) { fprintf(cdf, "%lu\t%lu\n", Count, *PValCountC); JLN(PValCountC, PJLCountCount, Count); } JLFA(Bytes, PJLCountCount); fflush(cdf); fclose(cdf); return 0; }
/* Recursively free a cjson node and everything it owns, dispatching on
 * the node type.  NULL is accepted and ignored.
 * NOTE(review): this view ends at the switch's closing brace -- the
 * function's own closing brace (and any trailing `free(node)`) is not
 * visible here; confirm the node struct itself is released after the
 * switch. */
void cjson_free(struct cjson *node) {
  if (node == NULL) {
    return;
  }
  switch (node->type) {
  case CJSON_ARRAY: {
    /* array elements live in a JudyL keyed by index: free each child,
     * then the array itself */
    int status = 0;
    Word_t index = 0;
    struct cjson **value = NULL;
    JLF(value, node->value.array.data, index);
    while (value != NULL) {
      cjson_free(*value);
      JLN(value, node->value.array.data, index);
    }
    JLFA(status, node->value.array.data);
  } break;
  case CJSON_BOOLEAN:
    break;
  case CJSON_NULL:
    break;
  case CJSON_NUMBER:
    free(node->value.number);
    break;
  case CJSON_OBJECT: {
    /* members live in a JudySL keyed by string; the key buffer must be
     * large enough for the longest key plus the terminator */
    int status = 0;
    uint8_t *key = ecx_malloc(node->value.object.key_length + 1);
    ec_with(key, free) {
      key[0] = '\0';
      struct cjson **value = NULL;
      JSLF(value, node->value.object.data, key);
      while (value != NULL) {
        cjson_free(*value);
        JSLN(value, node->value.object.data, key);
      }
      JSLFA(status, node->value.object.data);
    }
  } break;
  case CJSON_PAIR:
    /* a pair owns its key string and its value subtree */
    free(node->value.pair.key);
    cjson_free(node->value.pair.value);
    break;
  case CJSON_ROOT: {
    /* same layout as CJSON_ARRAY: JudyL of child nodes */
    int status = 0;
    Word_t index = 0;
    struct cjson **value = NULL;
    JLF(value, node->value.root.data, index);
    while (value != NULL) {
      cjson_free(*value);
      JLN(value, node->value.root.data, index);
    }
    JLFA(status, node->value.root.data);
  } break;
  case CJSON_STRING:
    free(node->value.string.bytes);
    break;
  }
/* Free an environment and its entire subtree: break target links in
 * both directions, switch off chance-based effects, free children in
 * reverse index order, release the three-level modifier array, and
 * finally free env itself.
 *
 * BUG FIX: the original wrapped the dogma_set_target() calls inside
 * assert(...), so compiling with NDEBUG removed the calls entirely and
 * left dangling target links.  The calls are now unconditional, with
 * the result asserted separately. */
int dogma_free_env(dogma_context_t* ctx, dogma_env_t* env) {
    int ret;
    dogma_key_t index = -1, index2, index3;
    dogma_env_t** child;
    dogma_array_t* modifiers;
    dogma_array_t* modifiers2;
    dogma_modifier_t** modifier;

    /* Clear our own target */
    if(env->target.context != NULL) {
        ret = dogma_set_target(ctx, env, NULL, NULL);
        assert(ret == DOGMA_OK);
    }

    /* Clear any targets of things that have what we're about to delete
     * as a target */
    if(env->targeted_by != NULL) {
        dogma_key_t tindex = 0;
        dogma_context_t** targeter;
        dogma_env_t* source;

        JLF(targeter, env->targeted_by, tindex);
        while(targeter != NULL) {
            /* the key doubles as the source env pointer */
            source = (dogma_env_t*)tindex;
            ret = dogma_set_target(*targeter, source, NULL, NULL);
            assert(ret == DOGMA_OK);
            JLN(targeter, env->targeted_by, tindex);
        }

        /* dogma_set_target removed the entries; the map must be empty */
        JLC(ret, env->targeted_by, 0, -1);
        assert(ret == 0);
        JLFA(ret, env->targeted_by);
        assert(ret == 0);
    }

    /* Clear any chance-based effects still toggled on */
    if(env->chance_effects != NULL) {
        dogma_key_t effectid = 0;
        bool* val;

        JLF(val, env->chance_effects, effectid);
        while(val != NULL) {
            DOGMA_ASSUME_OK(dogma_toggle_chance_based_effect_env(ctx, env, effectid, false));
            JLN(val, env->chance_effects, effectid);
        }
        JLFA(ret, env->chance_effects);
    }

    /* Free children last-to-first (index = -1 starts JLL at the
     * highest key) */
    JLL(child, env->children, index);
    while(child != NULL) {
        dogma_free_env(ctx, *child);
        JLP(child, env->children, index);
    }
    JLFA(ret, env->children);

    /* modifiers is a JudyL of JudyL of JudyL of dogma_modifier_t* */
    index = 0;
    JLF(modifiers, env->modifiers, index);
    while(modifiers != NULL) {
        index2 = 0;
        JLF(modifiers2, *modifiers, index2);
        while(modifiers2 != NULL) {
            index3 = 0;
            JLF(modifier, *modifiers2, index3);
            while(modifier != NULL) {
                free(*modifier);
                JLN(modifier, *modifiers2, index3);
            }
            JLFA(ret, *modifiers2);
            JLN(modifiers2, *modifiers, index2);
        }
        JLFA(ret, *modifiers);
        JLN(modifiers, env->modifiers, index);
    }
    JLFA(ret, env->modifiers);

    free(env);
    return DOGMA_OK;
}
/* Free the whole JudyL table.  Returns the byte count JLFA reports
 * (narrowed to int, matching the existing interface). */
int jtableL_free(jtableL *table) {
    int bytes_freed;
    JLFA(bytes_freed, table->t);
    return bytes_freed;
}
tdb_error tdb_encode(tdb_cons *cons, const tdb_item *items) { char path[TDB_MAX_PATH_SIZE]; char grouped_path[TDB_MAX_PATH_SIZE]; char toc_path[TDB_MAX_PATH_SIZE]; char *root = cons->root; char *read_buf = NULL; struct field_stats *fstats = NULL; uint64_t num_trails = 0; uint64_t num_events = cons->events.next; uint64_t num_fields = cons->num_ofields + 1; uint64_t max_timestamp = 0; uint64_t max_timedelta = 0; uint64_t *field_cardinalities = NULL; uint64_t i; Pvoid_t unigram_freqs = NULL; struct judy_128_map gram_freqs; struct judy_128_map codemap; Word_t tmp; FILE *grouped_w = NULL; FILE *grouped_r = NULL; int fd, ret = 0; TDB_TIMER_DEF j128m_init(&gram_freqs); j128m_init(&codemap); if (!(field_cardinalities = calloc(cons->num_ofields, 8))){ ret = TDB_ERR_NOMEM; goto done; } for (i = 0; i < cons->num_ofields; i++) field_cardinalities[i] = jsm_num_keys(&cons->lexicons[i]); /* 1. group events by trail, sort events of each trail by time, and delta-encode timestamps */ TDB_TIMER_START TDB_PATH(grouped_path, "%s/tmp.grouped.XXXXXX", root); if ((fd = mkstemp(grouped_path)) == -1){ ret = TDB_ERR_IO_OPEN; goto done; } if (!(grouped_w = fdopen(fd, "w"))){ ret = TDB_ERR_IO_OPEN; goto done; } if (cons->events.data) if ((ret = groupby_uuid(grouped_w, (struct tdb_cons_event*)cons->events.data, cons, &num_trails, &max_timestamp, &max_timedelta))) goto done; /* not the most clean separation of ownership here, but these objects can be huge so keeping them around unecessarily is expensive */ free(cons->events.data); cons->events.data = NULL; j128m_free(&cons->trails); TDB_CLOSE(grouped_w); grouped_w = NULL; TDB_OPEN(grouped_r, grouped_path, "r"); if (!(read_buf = malloc(READ_BUFFER_SIZE))){ ret = TDB_ERR_NOMEM; goto done; } setvbuf(grouped_r, read_buf, _IOFBF, READ_BUFFER_SIZE); TDB_TIMER_END("trail/groupby_uuid"); /* 2. 
store metatadata */ TDB_TIMER_START TDB_PATH(path, "%s/info", root); if ((ret = store_info(path, num_trails, num_events, cons->min_timestamp, max_timestamp, max_timedelta))) goto done; TDB_TIMER_END("trail/info"); /* 3. collect value (unigram) freqs, including delta-encoded timestamps */ TDB_TIMER_START unigram_freqs = collect_unigrams(grouped_r, num_events, items, num_fields); if (num_events > 0 && !unigram_freqs){ ret = TDB_ERR_NOMEM; goto done; } TDB_TIMER_END("trail/collect_unigrams"); /* 4. construct uni/bi-grams */ tdb_opt_value dont_build_bigrams; tdb_cons_get_opt(cons, TDB_OPT_CONS_NO_BIGRAMS, &dont_build_bigrams); TDB_TIMER_START if ((ret = make_grams(grouped_r, num_events, items, num_fields, unigram_freqs, &gram_freqs, dont_build_bigrams.value))) goto done; TDB_TIMER_END("trail/gram_freqs"); /* 5. build a huffman codebook and stats struct for encoding grams */ TDB_TIMER_START if ((ret = huff_create_codemap(&gram_freqs, &codemap))) goto done; if (!(fstats = huff_field_stats(field_cardinalities, num_fields, max_timedelta))){ ret = TDB_ERR_NOMEM; goto done; } TDB_TIMER_END("trail/huff_create_codemap"); /* 6. encode and write trails to disk */ TDB_TIMER_START TDB_PATH(path, "%s/trails.data", root); TDB_PATH(toc_path, "%s/trails.toc", root); if ((ret = encode_trails(items, grouped_r, num_events, num_trails, num_fields, &codemap, &gram_freqs, fstats, path, toc_path))) goto done; TDB_TIMER_END("trail/encode_trails"); /* 7. 
write huffman codebook to disk */ TDB_TIMER_START tdb_path(path, "%s/trails.codebook", root); if ((ret = store_codebook(&codemap, path))) goto done; TDB_TIMER_END("trail/store_codebook"); done: TDB_CLOSE_FINAL(grouped_w); TDB_CLOSE_FINAL(grouped_r); j128m_free(&gram_freqs); j128m_free(&codemap); #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Wsign-compare" JLFA(tmp, unigram_freqs); #pragma GCC diagnostic pop unlink(grouped_path); free(field_cardinalities); free(read_buf); free(fstats); return ret; out_of_memory: return TDB_ERR_NOMEM; }