/* Demonstrate basic hash table usage: insert three key/value pairs,
 * look two of them up, dump the table contents, and clean up. */
int main(void)
{
    extern hash_t *hash_table[TABLE_SIZE];
    hash_t *entry;

    hash_table_insert("al", "password");
    hash_table_insert("ed", "gowanlea");
    hash_table_insert("bryan", "leicester");

    entry = hash_table_search("al");
    if (entry) {
        printf("%s and %s\n", entry->key, entry->value);
    }

    entry = hash_table_search("ed");
    if (entry) {
        printf("%s and %s\n", entry->key, entry->value);
    } else
        printf("not found!\n");

    /* Full-table walk; slow, intended as a debugging aid. */
    hash_table_dump();
    hash_table_free();

    return EXIT_SUCCESS;
}
void test_hash_iterator_key_pair() { HashTable *hash_table; HashTableIterator iterator; HashTablePair pair; int *key = 0; int *val = 0; hash_table = hash_table_new(int_hash, int_equal); /* Add some values */ hash_table_insert(hash_table, &value1, &value1); hash_table_insert(hash_table, &value2, &value2); hash_table_iterate(hash_table, &iterator); while (hash_table_iter_has_more(&iterator)) { /* Retrieve both Key and Value */ pair = hash_table_iter_next(&iterator); key = (int*) pair.key; val = (int*) pair.value; assert(*key == *val); } hash_table_free(hash_table); }
/* Insert -hmmdump, -lm, -svq4svq, -beam, -lminmemory into a hash and display it. */ int main(int argc, char **argv) { hash_table_t *ht; ht = hash_table_new(75, 0); if (hash_table_enter(ht, "-hmmdump", (void *)1) != (void *)1) { E_FATAL("Insertion of -hmmdump failed\n"); } if (hash_table_enter(ht, "-svq4svq", (void *)1) != (void *)1) { E_FATAL("Insertion of -svq4svq failed\n"); } if (hash_table_enter(ht, "-lm", (void *)1) != (void *)1) { E_FATAL("Insertion of -lm failed\n"); } if (hash_table_enter(ht, "-beam", (void *)1) != (void *)1) { E_FATAL("Insertion of -beam failed\n"); } if (hash_table_enter(ht, "-lminmemory", (void *)1) != (void *)1) { E_FATAL("Insertion of -lminmemory failed\n"); } hash_table_display(ht, 1); hash_table_free(ht); ht = NULL; return 0; }
/* Release all storage owned by a dictionary.  Honors reference
 * counting: returns the remaining reference count when other holders
 * exist, or 0 once the dictionary has actually been freed. */
int
dict_free(dict_t *d)
{
    int i;
    dictword_t *w;

    if (d == NULL)
        return 0;
    if (--d->refcnt > 0)
        return d->refcnt;

    /* First free the per-word strings and phone arrays. */
    for (i = 0; i < d->n_word; i++) {
        w = (dictword_t *) &(d->word[i]);
        if (w->word)
            ckd_free((void *) w->word);
        if (w->ciphone)
            ckd_free((void *) w->ciphone);
    }

    /* Then the word array itself and the remaining owned objects. */
    if (d->word)
        ckd_free((void *) d->word);
    if (d->ht)
        hash_table_free(d->ht);
    if (d->mdef)
        bin_mdef_free(d->mdef);
    if (d->ngram_g2p_model)
        ngram_model_free(d->ngram_g2p_model);
    ckd_free((void *) d);

    return 0;
}
/* Tear down all registered DRAM systems at simulator shutdown:
 * dump a per-system report (if a report file is open), free each
 * system, free the registry table, and close the report file. */
void dram_system_done(void)
{
    struct dram_system_t *system;

    /* Free list of dram systems */
    if (dram_system_table) {
        for (hash_table_find_first(dram_system_table, (void **) &system);
             system;
             hash_table_find_next(dram_system_table, (void **) &system)) {
            /* Dump report for this DRAM system. */
            if (dram_report_file)
                dram_system_dump(system, dram_report_file);
            dram_system_free(system);
        }

        /* FIX: the free was previously outside this guard, so a NULL
         * table (no DRAM system ever created) was passed to
         * hash_table_free(). */
        hash_table_free(dram_system_table);
    }

    /* Close report file (NOTE(review): assumes file_close tolerates
     * NULL, as the original did -- confirm against its definition). */
    file_close(dram_report_file);
}
/* Verify hash_table_remove: removing an existing entry decrements the
 * count and makes lookups return NULL; removing a non-existent entry
 * leaves the count unchanged. */
void test_hash_table_remove(void)
{
    HashTable *hash_table;
    char buf[10];

    hash_table = generate_hash_table();

    assert(hash_table_num_entries(hash_table) == NUM_TEST_VALUES);
    sprintf(buf, "%i", 5000);
    assert(hash_table_lookup(hash_table, buf) != NULL);

    /* Remove an entry */
    hash_table_remove(hash_table, buf);

    /* Check entry counter.  FIX: was hard-coded 9999; tying it to
     * NUM_TEST_VALUES keeps the test valid if the set size changes. */
    assert(hash_table_num_entries(hash_table) == NUM_TEST_VALUES - 1);

    /* Check that NULL is returned now */
    assert(hash_table_lookup(hash_table, buf) == NULL);

    /* Try removing a non-existent entry */
    sprintf(buf, "%i", -1);
    hash_table_remove(hash_table, buf);
    assert(hash_table_num_entries(hash_table) == NUM_TEST_VALUES - 1);

    hash_table_free(hash_table);
}
/* Release a command-line object, honoring its reference count.
 * Returns the remaining refcount, or 0 once it has been freed. */
int
cmd_ln_free_r(cmd_ln_t *cmdln)
{
    if (cmdln == NULL)
        return 0;
    if (--cmdln->refcount > 0)
        return cmdln->refcount;

    /* Free every stored value, then the hash table itself. */
    if (cmdln->ht) {
        glist_t entries;
        gnode_t *node;
        int32 count;

        entries = hash_table_tolist(cmdln->ht, &count);
        for (node = entries; node; node = gnode_next(node)) {
            hash_entry_t *entry = (hash_entry_t *) gnode_ptr(node);
            cmd_ln_val_free((cmd_ln_val_t *) entry->val);
        }
        glist_free(entries);
        hash_table_free(cmdln->ht);
        cmdln->ht = NULL;
    }

    /* Free the duplicated argument vector, if one was kept. */
    if (cmdln->f_argv) {
        int32 i;

        for (i = 0; i < cmdln->f_argc; ++i)
            ckd_free(cmdln->f_argv[i]);
        ckd_free(cmdln->f_argv);
        cmdln->f_argv = NULL;
        cmdln->f_argc = 0;
    }

    ckd_free(cmdln);
    return 0;
}
void test_hash_table_new_free(void) { HashTable *hash_table; hash_table = hash_table_new(int_hash, int_equal); assert(hash_table != NULL); /* Add some values */ hash_table_insert(hash_table, &value1, &value1); hash_table_insert(hash_table, &value2, &value2); hash_table_insert(hash_table, &value3, &value3); hash_table_insert(hash_table, &value4, &value4); /* Free the hash table */ hash_table_free(hash_table); /* Test out of memory scenario */ alloc_test_set_limit(0); hash_table = hash_table_new(int_hash, int_equal); assert(hash_table == NULL); assert(alloc_test_get_allocated() == 0); alloc_test_set_limit(1); hash_table = hash_table_new(int_hash, int_equal); assert(hash_table == NULL); assert(alloc_test_get_allocated() == 0); }
void test_hash_table_iterating_remove(void) { HashTable *hash_table; HashTableIterator iterator; char buf[10]; char *val; HashTablePair pair; int count; unsigned int removed; int i; hash_table = generate_hash_table(); /* Iterate over all values in the table */ count = 0; removed = 0; hash_table_iterate(hash_table, &iterator); while (hash_table_iter_has_more(&iterator)) { /* Read the next value */ pair = hash_table_iter_next(&iterator); val = pair.value; /* Remove every hundredth entry */ if ((atoi(val) % 100) == 0) { hash_table_remove(hash_table, val); ++removed; } ++count; } /* Check counts */ assert(removed == 100); assert(count == NUM_TEST_VALUES); assert(hash_table_num_entries(hash_table) == NUM_TEST_VALUES - removed); /* Check all entries divisible by 100 were really removed */ for (i=0; i<NUM_TEST_VALUES; ++i) { sprintf(buf, "%i", i); if (i % 100 == 0) { assert(hash_table_lookup(hash_table, buf) == NULL); } else { assert(hash_table_lookup(hash_table, buf) != NULL); } } hash_table_free(hash_table); }
/* Free a visualization x86 core: its context table, its instruction
 * table together with every instruction, its name, and the core. */
void vi_x86_core_free(struct vi_x86_core_t *core)
{
    struct vi_x86_inst_t *inst;
    char *inst_name;

    /* Context table elements are VI_X86_CONTEXT_EMPTY, so only the
     * table itself needs freeing. */
    hash_table_free(core->context_table);

    /* Free each instruction, then the table holding them. */
    HASH_TABLE_FOR_EACH(core->inst_table, inst_name, inst)
        vi_x86_inst_free(inst);
    hash_table_free(core->inst_table);

    /* Free the core object itself. */
    str_free(core->name);
    free(core);
}
/* Free the transition tables attached to FSG state 'i'.  Each hash
 * entry holds a glist of links; the link objects themselves are
 * reclaimed wholesale by listelem_alloc_free() elsewhere, so only the
 * glists and the two hash tables are released here. */
static void
trans_list_free(fsg_model_t *fsg, int32 i)
{
    hash_iter_t *it;

    if (fsg->trans[i].trans) {
        for (it = hash_table_iter(fsg->trans[i].trans); it;
             it = hash_table_iter_next(it)) {
            glist_t links = (glist_t) hash_entry_val(it->ent);
            glist_free(links);
        }
    }
    hash_table_free(fsg->trans[i].trans);
    hash_table_free(fsg->trans[i].null_trans);
}
/* Build the merged vocabulary of a model set and the per-submodel
 * word-ID mapping tables.  'n' is the n-gram order passed through to
 * ngram_model_init(). */
static void build_widmap(ngram_model_t * base, logmath_t * lmath, int32 n)
{
    ngram_model_set_t *set = (ngram_model_set_t *) base;
    ngram_model_t **models = set->lms;
    hash_table_t *vocab;
    glist_t hlist;
    gnode_t *gn;
    int32 i;

    /* Construct a merged vocabulary and a set of word-ID mappings. */
    vocab = hash_table_new(models[0]->n_words, FALSE);
    /* Create the set of merged words. */
    for (i = 0; i < set->n_models; ++i) {
        int32 j;
        for (j = 0; j < models[i]->n_words; ++j) {
            /* Ignore collisions. */
            (void) hash_table_enter_int32(vocab, models[i]->word_str[j], j);
        }
    }
    /* Create the array of words, then sort it.  <UNK> is forced into
     * the vocabulary if no submodel provided it. */
    if (hash_table_lookup(vocab, "<UNK>", NULL) != 0)
        (void) hash_table_enter_int32(vocab, "<UNK>", 0);
    /* Now we know the number of unigrams, initialize the base model. */
    ngram_model_init(base, &ngram_model_set_funcs, lmath, n,
                     hash_table_inuse(vocab));
    base->writable = FALSE; /* We will reuse the pointers from the submodels. */
    i = 0;
    hlist = hash_table_tolist(vocab, NULL);
    for (gn = hlist; gn; gn = gnode_next(gn)) {
        hash_entry_t *ent = gnode_ptr(gn);
        /* Hash keys are the submodels' word strings; base borrows them. */
        base->word_str[i++] = (char *) ent->key;
    }
    glist_free(hlist);
    qsort(base->word_str, base->n_words, sizeof(*base->word_str),
          my_compare);

    /* Now create the word ID mappings. */
    if (set->widmap)
        ckd_free_2d((void **) set->widmap);
    set->widmap = (int32 **) ckd_calloc_2d(base->n_words, set->n_models,
                                           sizeof(**set->widmap));
    for (i = 0; i < base->n_words; ++i) {
        int32 j;
        /* Also create the master wid mapping. */
        (void) hash_table_enter_int32(base->wid, base->word_str[i], i);
        /* printf("%s: %d => ", base->word_str[i], i); */
        for (j = 0; j < set->n_models; ++j) {
            /* Map the merged word to each submodel's own word ID. */
            set->widmap[i][j] = ngram_wid(models[j], base->word_str[i]);
            /* printf("%d ", set->widmap[i][j]); */
        }
        /* printf("\n"); */
    }
    hash_table_free(vocab);
}
/* Dispose of a product distribution and its backing hash table.
 * Safe to call with NULL. */
void product_distribution_free(product_distribution_t *pd)
{
    if (pd == NULL)
        return;

    if (pd->offset_distribution != NULL)
        hash_table_free(pd->offset_distribution);

    free(pd);
}
/* Destroy a symbol table: delete every symbol it owns, then the
 * underlying hash table itself. */
void Llvm2siSymbolTableDestroy(Llvm2siSymbolTable *self)
{
    char *name;
    Llvm2siSymbol *sym;

    HASH_TABLE_FOR_EACH(self->table, name, sym)
        delete(sym);
    hash_table_free(self->table);
}
/* Release the word list, the dictionary hash table, and the DP node
 * allocator. */
static void
dp_free(void)
{
    int32 w;

    for (w = 0; w < n_word; ++w)
        ckd_free(word[w]);
    ckd_free(word);
    hash_table_free(dict_ht);
    ckd_free(node_alloc);
}
/* Exercise removal: non-existent keys leave the caller's value buffer
 * untouched, existing keys are removed exactly once, and removal with a
 * NULL out-parameter also works. */
static void test_remove(void)
{
    Hash_table table;

    hash_table_init(&table);
    populate(&table);

/* A key never inserted: get and remove both fail and the caller's
 * value buffer keeps its sentinel. */
#define VERIFY_REMOVE_NOT_EXISTS(key) \
    { \
        int sentinel = 234; \
        \
        VERIFY(!hash_table_get(&table, key, &sentinel)); \
        VERIFY(!hash_table_remove(&table, key, &sentinel)); \
        VERIFY(!hash_table_get(&table, key, &sentinel)); \
        VERIFY(sentinel == 234); \
    }

/* An existing key: remove reports the same value get did, and a
 * second remove fails. */
#define VERIFY_REMOVE_EXISTS(key) \
    { \
        int got, taken; \
        \
        VERIFY(hash_table_get(&table, key, &got)); \
        VERIFY(hash_table_remove(&table, key, &taken)); \
        VERIFY(!hash_table_get(&table, key, NULL)); \
        VERIFY(!hash_table_remove(&table, key, NULL)); \
        VERIFY(got == taken); \
    }

    VERIFY_REMOVE_NOT_EXISTS("foo");
    VERIFY_REMOVE_NOT_EXISTS("on");
    VERIFY_REMOVE_NOT_EXISTS("tw");

    VERIFY_REMOVE_EXISTS("negative one");
    VERIFY_REMOVE_EXISTS("zero");
    VERIFY_REMOVE_EXISTS("one");
    VERIFY_REMOVE_EXISTS("two");
    VERIFY_REMOVE_EXISTS("three");
    VERIFY_REMOVE_EXISTS("four");
    VERIFY_REMOVE_EXISTS("five");
    VERIFY_REMOVE_EXISTS("six");
    VERIFY_REMOVE_EXISTS("seven");
    VERIFY_REMOVE_EXISTS("eight");
    VERIFY_REMOVE_EXISTS("nine");
    VERIFY_REMOVE_EXISTS("ten");
    VERIFY_REMOVE_EXISTS("");

#undef VERIFY_REMOVE_NOT_EXISTS
#undef VERIFY_REMOVE_EXISTS

    // The case where the key exists and val is NULL
    hash_table_set(&table, "foo", 3, NULL);
    VERIFY(hash_table_remove(&table, "foo", NULL));

    hash_table_free(&table);
}
/*****************************************************************//**
Frees the adaptive search system at a database shutdown. */
UNIV_INTERN
void
btr_search_sys_free(void)
/*=====================*/
{
	mem_free(btr_search_latch_temp);
	btr_search_latch_temp = NULL;
	/* The hash index's entries live in its memory heap; free the
	heap before the table structure that references it. */
	mem_heap_free(btr_search_sys->hash_index->heap);
	hash_table_free(btr_search_sys->hash_index);
	mem_free(btr_search_sys);
	btr_search_sys = NULL;
}
/* Module shutdown hook.  Runs in the main process only; the database
 * connection is not closed here because mod_init already closed it.
 * Only the shared-memory tables are released. */
static void destroy(void)
{
    if (hash_table) {
        shm_free(hash_table);
        hash_table = 0;
    }

    if (hash_table_1) {
        hash_table_free(hash_table_1);
        shm_free(hash_table_1);
        hash_table_1 = 0;
    }

    if (hash_table_2) {
        hash_table_free(hash_table_2);
        shm_free(hash_table_2);
        hash_table_2 = 0;
    }
}
void test_hash_table_free_functions(void) { HashTable *hash_table; int *key; int *value; int i; /* Create a hash table, fill it with values */ hash_table = hash_table_new(int_hash, int_equal); hash_table_register_free_functions(hash_table, free_key, free_value); allocated_values = 0; for (i=0; i<NUM_TEST_VALUES; ++i) { key = new_key(i); value = new_value(99); hash_table_insert(hash_table, key, value); } assert(allocated_keys == NUM_TEST_VALUES); assert(allocated_values == NUM_TEST_VALUES); /* Check that removing a key works */ i = NUM_TEST_VALUES / 2; hash_table_remove(hash_table, &i); assert(allocated_keys == NUM_TEST_VALUES - 1); assert(allocated_values == NUM_TEST_VALUES - 1); /* Check that replacing an existing key works */ key = new_key(NUM_TEST_VALUES / 3); value = new_value(999); assert(allocated_keys == NUM_TEST_VALUES); assert(allocated_values == NUM_TEST_VALUES); hash_table_insert(hash_table, key, value); assert(allocated_keys == NUM_TEST_VALUES - 1); assert(allocated_values == NUM_TEST_VALUES - 1); /* A free of the hash table should free all of the keys and values */ hash_table_free(hash_table); assert(allocated_keys == 0); assert(allocated_values == 0); }
/* Tear down the global Fermi symbol table, freeing each symbol before
 * the table that holds them. */
void frm_symbol_table_done(void)
{
    struct frm_symbol_t *sym;
    char *sym_name;

    /* Free all symbols. */
    HASH_TABLE_FOR_EACH(frm_symbol_table, sym_name, sym)
        frm_symbol_free(sym);

    /* Free the table itself. */
    hash_table_free(frm_symbol_table);
}
/*******************************************************************//**
Free the INFORMATION SCHEMA trx related cache. */
UNIV_INTERN
void
trx_i_s_cache_free(
/*===============*/
	trx_i_s_cache_t*	cache)	/*!< in, own: cache to free */
{
	hash_table_free(cache->locks_hash);
	ha_storage_free(cache->storage);
	/* Release the row caches backing the three I_S tables. */
	table_cache_free(&cache->innodb_trx);
	table_cache_free(&cache->innodb_locks);
	table_cache_free(&cache->innodb_lock_waits);
	/* Zero the structure so stale pointers cannot be reused. */
	memset(cache, 0, sizeof *cache);
}
/* Free a visualization SI compute unit: its work-groups and their
 * table, its instructions and their table, its name, and the unit. */
void vi_si_compute_unit_free(struct vi_si_compute_unit_t *compute_unit)
{
    struct vi_si_work_group_t *wg;
    struct vi_si_inst_t *inst;
    char *wg_name;
    char *inst_name;

    /* Work-groups, then the table holding them. */
    HASH_TABLE_FOR_EACH(compute_unit->work_group_table, wg_name, wg)
        vi_si_work_group_free(wg);
    hash_table_free(compute_unit->work_group_table);

    /* Instructions, then the table holding them. */
    HASH_TABLE_FOR_EACH(compute_unit->inst_table, inst_name, inst)
        vi_si_inst_free(inst);
    hash_table_free(compute_unit->inst_table);

    /* The compute unit itself. */
    free(compute_unit->name);
    free(compute_unit);
}
/* Destroy a binary configuration object: clear and free its element
 * table (if one was created), then release the file name string and
 * the object itself. */
void bin_config_free(struct bin_config_t *bin_config)
{
    if (bin_config->elem_list) {
        bin_config_clear(bin_config);
        hash_table_free(bin_config->elem_list);
    }

    free(bin_config->file_name);
    free(bin_config);
}
/* Read a class definition file and add each class it defines to the
 * language model.  Returns 0 on success, -1 on failure. */
int32
ngram_model_read_classdef(ngram_model_t *model, const char *file_name)
{
    hash_table_t *classes;
    glist_t hl = NULL;
    gnode_t *gn;
    int32 rv = -1;

    classes = hash_table_new(0, FALSE);
    if (read_classdef_file(classes, file_name) < 0) {
        hash_table_free(classes);
        return -1;
    }

    /* Create a new class in the language model for each classdef. */
    hl = hash_table_tolist(classes, NULL);
    for (gn = hl; gn; gn = gnode_next(gn)) {
        hash_entry_t *he = gnode_ptr(gn);
        classdef_t *classdef = he->val;

        if (ngram_model_add_class(model, he->key, 1.0,
                                  classdef->words,
                                  classdef->weights,
                                  classdef->n_words) < 0)
            goto error_out;
    }
    rv = 0;

error_out:
    /* Shared cleanup path: the classdefs and their key strings are
     * owned here and released whether or not the adds succeeded. */
    for (gn = hl; gn; gn = gnode_next(gn)) {
        hash_entry_t *he = gnode_ptr(gn);
        ckd_free((char *) he->key);
        classdef_free(he->val);
    }
    glist_free(hl);
    hash_table_free(classes);
    return rv;
}
/* Free a model definition object and everything it owns.  Safe to call
 * with NULL.  The left/right-context trees are freed before the arrays
 * that anchor them. */
void
mdef_free(mdef_t * m)
{
    int i, j;

    if (m) {
        if (m->sen2cimap)
            ckd_free((void *) m->sen2cimap);
        if (m->cd2cisen)
            ckd_free((void *) m->cd2cisen);

        /* RAH, go down the ->next list and delete all the pieces */
        for (i = 0; i < N_WORD_POSN; i++)
            for (j = 0; j < m->n_ciphone; j++)
                if (m->wpos_ci_lclist[i][j]) {
                    mdef_free_recursive_lc(m->wpos_ci_lclist[i][j]->next);
                    mdef_free_recursive_rc(m->wpos_ci_lclist[i][j]->
                                           rclist);
                }

        /* Second pass: the list head nodes themselves. */
        for (i = 0; i < N_WORD_POSN; i++)
            for (j = 0; j < m->n_ciphone; j++)
                if (m->wpos_ci_lclist[i][j])
                    ckd_free((void *) m->wpos_ci_lclist[i][j]);

        if (m->wpos_ci_lclist)
            ckd_free_2d((void *) m->wpos_ci_lclist);
        if (m->sseq)
            ckd_free_2d((void *) m->sseq);
        /* Free phone context */
        if (m->phone)
            ckd_free((void *) m->phone);
        if (m->ciphone_ht)
            hash_table_free(m->ciphone_ht);

        /* CI phone names, then the ciphone array that holds them. */
        for (i = 0; i < m->n_ciphone; i++) {
            if (m->ciphone[i].name)
                ckd_free((void *) m->ciphone[i].name);
        }
        if (m->ciphone)
            ckd_free((void *) m->ciphone);

        if (m->st2senmap)
            ckd_free((void *) m->st2senmap);
        ckd_free((void *) m);
    }
}
/* Case-fold every word string in the model (NGRAM_UPPER / NGRAM_LOWER)
 * and rebuild the word-to-ID hash table accordingly.  Words starting
 * with '<' or '[' (tags and classes) are left untouched.  Returns 0. */
int
ngram_model_casefold(ngram_model_t * model, int kase)
{
    int writable, i;
    hash_table_t *new_wid;

    /* Were word strings already allocated? */
    writable = model->writable;
    /* Either way, we are going to allocate some word strings. */
    model->writable = TRUE;
    /* And, don't forget, we need to rebuild the word to unigram ID
     * mapping. */
    new_wid = hash_table_new(model->n_words, FALSE);
    for (i = 0; i < model->n_words; ++i) {
        char *outstr;
        if (writable) {
            /* Strings are owned by the model; fold them in place. */
            outstr = model->word_str[i];
        }
        else {
            /* Strings were borrowed; copy before modifying. */
            outstr = ckd_salloc(model->word_str[i]);
        }
        /* Don't case-fold <tags> or [classes] */
        if (outstr[0] == '<' || outstr[0] == '[') {
        }
        else {
            switch (kase) {
            case NGRAM_UPPER:
                ucase(outstr);
                break;
            case NGRAM_LOWER:
                lcase(outstr);
                break;
            default:
                ;
            }
        }
        model->word_str[i] = outstr;

        /* Now update the hash table.  We might have terrible
         * collisions here, so warn about them. */
        if (hash_table_enter_int32(new_wid, model->word_str[i], i) != i) {
            E_WARN("Duplicate word in dictionary after conversion: %s\n",
                   model->word_str[i]);
        }
    }
    /* Swap out the hash table. */
    hash_table_free(model->wid);
    model->wid = new_wid;
    return 0;
}
void test_hash_table_iterating(void) { HashTable *hash_table; HashTableIterator iterator; int count; hash_table = generate_hash_table(); /* Iterate over all values in the table */ count = 0; hash_table_iterate(hash_table, &iterator); while (hash_table_iter_has_more(&iterator)) { hash_table_iter_next(&iterator); ++count; } assert(count == NUM_TEST_VALUES); /* Test iter_next after iteration has completed. */ assert(hash_table_iter_next(&iterator) == HASH_TABLE_NULL); hash_table_free(hash_table); /* Test iterating over an empty table */ hash_table = hash_table_new(int_hash, int_equal); hash_table_iterate(hash_table, &iterator); assert(hash_table_iter_has_more(&iterator) == 0); hash_table_free(hash_table); }
/**
 * Count slots needed for a scope's hash table
 *
 * Before filling literal indexes 'hash' table we shall initiate it with the number of necessary literal indexes.
 * Since bytecode is divided into blocks and the id of the block is a part of the hash key, we shall divide bytecode
 * into blocks and count unique literal indexes used in each block.
 *
 * @return total number of literals in scope
 */
size_t
scopes_tree_count_literals_in_blocks (scopes_tree tree) /**< scope */
{
  assert_tree (tree);

  size_t result = 0;

  /* Reset the global literal-to-uid mapping before counting. */
  if (lit_id_to_uid != null_hash)
  {
    hash_table_free (lit_id_to_uid);
    lit_id_to_uid = null_hash;
  }
  next_uid = 0;
  global_oc = 0;

  assert_tree (tree);
  vm_instr_counter_t instr_pos;
  bool header = true;

  /* First loop: count literals in the scope's header instructions,
   * stopping at the first non-meta instruction after the header. */
  for (instr_pos = 0; instr_pos < tree->instrs_count; instr_pos++)
  {
    op_meta *om_p = extract_op_meta (tree->instrs, instr_pos);
    if (om_p->op.op_idx != VM_OP_META && !header)
    {
      break;
    }
    if (om_p->op.op_idx == VM_OP_REG_VAR_DECL)
    {
      header = false;
    }
    result += count_new_literals_in_instr (om_p);
  }

  /* Variable declarations of this scope. */
  for (vm_instr_counter_t var_decl_pos = 0;
       var_decl_pos < tree->var_decls_cout;
       var_decl_pos++)
  {
    op_meta *om_p = extract_op_meta (tree->var_decls, var_decl_pos);
    result += count_new_literals_in_instr (om_p);
  }

  /* Recurse into child scopes. */
  for (uint8_t child_id = 0; child_id < tree->t.children_num; child_id++)
  {
    result += scopes_tree_count_literals_in_blocks (*(scopes_tree *) linked_list_element (tree->t.children, child_id));
  }

  /* Second loop: continue from where the first loop broke off and
   * count the remaining instructions of this scope. */
  for (; instr_pos < tree->instrs_count; instr_pos++)
  {
    op_meta *om_p = extract_op_meta (tree->instrs, instr_pos);
    result += count_new_literals_in_instr (om_p);
  }

  return result;
} /* scopes_tree_count_literals_in_blocks */
/* At every BLOCK_SIZE-th opcode, reset the per-block literal numbering
 * and replace the lit_id -> uid hash table with a fresh one. */
static void
start_new_block_if_necessary (void)
{
  if (global_oc % BLOCK_SIZE != 0)
  {
    return;
  }

  next_uid = 0;
  if (lit_id_to_uid != null_hash)
  {
    hash_table_free (lit_id_to_uid);
    lit_id_to_uid = null_hash;
  }
  lit_id_to_uid = hash_table_init (sizeof (lit_cpointer_t),
                                   sizeof (idx_t),
                                   HASH_SIZE,
                                   lit_id_hash);
}
/* Detach all module attachments from a client connection.  When
 * 'destroy_table' is nonzero, the attachment hash table itself is
 * freed; otherwise it is merely emptied for reuse.
 * (FIX: parameter renamed from 'free', which shadowed the standard
 * library free() inside this function; C callers are positional, so
 * the rename is interface-compatible.) */
void
hosting_clear_cli_attachments (client_connection_t *cli, int destroy_table)
{
  if (!cli->cli_module_attachments)
    return;

  maphash (hosting_clear_attachment, cli->cli_module_attachments);

  if (destroy_table)
    {
      hash_table_free (cli->cli_module_attachments);
      cli->cli_module_attachments = NULL;
    }
  else
    clrhash (cli->cli_module_attachments);
}