/* Serialize a GSUB ligature-substitution subtable into a byte buffer.
 * Visible portion: groups ligatures by their first component glyph (each
 * distinct start gid becomes one LigatureSet) and builds the start-glyph
 * coverage table.  NOTE(review): function is truncated here — the actual
 * buffer serialization follows in code not visible in this chunk. */
caryll_buffer *caryll_write_gsub_ligature_subtable(otl_subtable *_subtable) {
	caryll_buffer *buf = bufnew();
	subtable_gsub_ligature *subtable = &(_subtable->gsub_ligature);
	ligature_aggerator *h = NULL, *s, *tmp;
	uint16_t nLigatures = subtable->to->numGlyphs;
	// Collect the set of distinct first-component gids; each gets a
	// provisional ligature-set id (re-assigned after sorting below).
	for (uint16_t j = 0; j < nLigatures; j++) {
		int sgid = subtable->from[j]->glyphs[0].gid;
		HASH_FIND_INT(h, &sgid, s);
		if (!s) {
			NEW(s);
			s->gid = sgid;
			s->ligid = HASH_COUNT(h);
			HASH_ADD_INT(h, gid, s);
		}
	}
	// Coverage glyphs must be in gid order per the OpenType spec.
	HASH_SORT(h, by_gid);
	otl_coverage *startCoverage;
	NEW(startCoverage);
	startCoverage->numGlyphs = HASH_COUNT(h);
	NEW_N(startCoverage->glyphs, startCoverage->numGlyphs);
	uint16_t jj = 0;
	// Re-number ligature sets to match the sorted coverage order.
	foreach_hash(s, h) {
		s->ligid = jj;
		startCoverage->glyphs[jj].gid = s->gid;
		startCoverage->glyphs[jj].name = NULL;
		jj++;
	}
/* Return the total number of live sessions (TCP + UDP + ICMP) summed
 * over all packet threads. */
uint32_t moloch_session_monitoring() {
    uint32_t total = 0;
    int      thread;

    for (thread = 0; thread < config.packetThreads; thread++) {
        total += HASH_COUNT(h_, sessions[thread][SESSION_TCP]);
        total += HASH_COUNT(h_, sessions[thread][SESSION_UDP]);
        total += HASH_COUNT(h_, sessions[thread][SESSION_ICMP]);
    }
    return total;
}
/* Consolidate a GSUB single-substitution subtable: resolve glyph names,
 * drop unresolvable or duplicate mappings, and rebuild both coverage
 * tables in gid order.  NOTE(review): function is truncated here — its
 * final closing brace lies outside this chunk. */
bool consolidate_gsub_single(caryll_font *font, table_otl *table, otl_subtable *_subtable, sds lookupName) {
	subtable_gsub_single *subtable = &(_subtable->gsub_single);
	consolidate_coverage(font, subtable->from, lookupName);
	consolidate_coverage(font, subtable->to, lookupName);
	// BUG FIX: both arms of this ternary previously read
	// subtable->from->numGlyphs, so when "to" was the shorter coverage we
	// iterated past the end of to->glyphs (out-of-bounds read).
	uint16_t len = (subtable->from->numGlyphs < subtable->to->numGlyphs ? subtable->from->numGlyphs
	                                                                    : subtable->to->numGlyphs);
	gsub_single_map_hash *h = NULL;
	// Build a from-gid -> to-gid map, skipping unnamed glyphs and
	// reporting duplicate source glyphs.
	for (uint16_t k = 0; k < len; k++) {
		if (subtable->from->glyphs[k].name && subtable->to->glyphs[k].name) {
			gsub_single_map_hash *s;
			int fromid = subtable->from->glyphs[k].gid;
			HASH_FIND_INT(h, &fromid, s);
			if (s) {
				fprintf(stderr, "[Consolidate] Double-mapping a glyph in a "
				                "single substitution /%s.\n",
				        subtable->from->glyphs[k].name);
			} else {
				NEW(s);
				s->fromid = subtable->from->glyphs[k].gid;
				s->toid = subtable->to->glyphs[k].gid;
				s->fromname = subtable->from->glyphs[k].name;
				s->toname = subtable->to->glyphs[k].name;
				HASH_ADD_INT(h, fromid, s);
			}
		}
	}
	HASH_SORT(h, by_from_id);
	if (HASH_COUNT(h) != subtable->from->numGlyphs || HASH_COUNT(h) != subtable->to->numGlyphs) {
		fprintf(stderr, "[Consolidate] In single subsitution lookup %s, some "
		                "mappings are ignored.\n",
		        lookupName);
	}
	// Rebuild both coverages from the surviving, sorted mappings.
	subtable->from->numGlyphs = HASH_COUNT(h);
	subtable->to->numGlyphs = HASH_COUNT(h);
	FREE(subtable->from->glyphs);
	FREE(subtable->to->glyphs);
	NEW_N(subtable->from->glyphs, subtable->from->numGlyphs);
	NEW_N(subtable->to->glyphs, subtable->to->numGlyphs);
	{
		gsub_single_map_hash *s, *tmp;
		uint16_t j = 0;
		HASH_ITER(hh, h, s, tmp) {
			subtable->from->glyphs[j].gid = s->fromid;
			subtable->from->glyphs[j].name = s->fromname;
			subtable->to->glyphs[j].gid = s->toid;
			subtable->to->glyphs[j].name = s->toname;
			j++;
			HASH_DEL(h, s);
			free(s);
		}
	}
int matchInodefiles(void) { fileop_t *f; fileop_t *finode; char fpath[PATH_MAX]; struct stat st; int64_t inode; // first try to match inodefiles to files for (f = files; f != NULL; f = (fileop_t*) (f->hh.next)) { // get ino of the file getAbsolutePath(fpath, config.snapshot, f->relpath); if (stat(fpath, &st) != -1) { inode = st.st_ino; } else { // this should not happen: we can't open the file errMsg(LOG_WARNING, "Could not stat file %s ", fpath); } // if this ino is in inodefiles, merge the operations in f and // remove the element from inodefiles // TODO: what about order? HASH_FIND_INT(inodefiles, &inode, finode); if (finode != NULL) { if (mergeOperations(f, finode) != 0) return -1; free(finode->operations); HASH_DEL(inodefiles, finode); free(finode); } // if there are no entries in inodefiles left, we are done if (HASH_COUNT(inodefiles) == 0) // HASH_COUNT is cheap return 0; } // the rest must be matched by file tree walk (nftw) int flags = 0; flags |= FTW_MOUNT; // stay in the file system flags |= FTW_PHYS; // do not dereference symlinks if (nftw(config.snapshot, matchInode, 10, flags) == -1) { errMsg(LOG_WARNING, "Could not stat file %s ", fpath); } // if there are still some entries left in inodefiles, we have a problem if (HASH_COUNT(inodefiles) == 0) return 0; else return -1; }
/* Print a summary of the data parsed from a MATPOWER case:
 * base power plus the length of each parsed list and the bus hash. */
void MAT_PARSER_show(Parser* p) {

  MAT_Parser* parser = (MAT_Parser*)PARSER_get_data(p);
  int num_buses;
  int num_branches;
  int num_gens;
  int num_costs;
  int num_utils;

  // Nothing to show without parser data
  if (!parser)
    return;

  // Measure each list
  LIST_len(MAT_Bus,parser->bus_list,next,num_buses);
  LIST_len(MAT_Branch,parser->branch_list,next,num_branches);
  LIST_len(MAT_Gen,parser->gen_list,next,num_gens);
  LIST_len(MAT_Cost,parser->cost_list,next,num_costs);
  LIST_len(MAT_Util,parser->util_list,next,num_utils);

  // Report
  printf("\nParsed Data\n");
  printf("base power : %.2f\n",parser->base_power);
  printf("bus list : %d\n",num_buses);
  printf("bus hash : %d\n",HASH_COUNT(parser->bus_hash));
  printf("gen list : %d\n",num_gens);
  printf("branch list: %d\n",num_branches);
  printf("cost list : %d\n",num_costs);
  printf("util list : %d\n",num_utils);
}
int olsr_db_rt_report_so(char** str_out) { int report_str_len = 70; olsr_db_rt_t* current_entry = rt_set; char* output; char entry_str[report_str_len + 1]; output = malloc(sizeof(char) * report_str_len * (HASH_COUNT(rt_set)) + 1); if(output == NULL) { return false; } struct timeval curr_time; gettimeofday(&curr_time, NULL); // initialize first byte to \0 to mark output as empty *output = '\0'; while(current_entry != NULL) { snprintf(entry_str, report_str_len + 1, MAC "\t" MAC "\t" MAC "\t%i\t%5.2f\n", EXPLODE_ARRAY6(current_entry->dest_addr), EXPLODE_ARRAY6(current_entry->next_hop), EXPLODE_ARRAY6(current_entry->precursor_addr), current_entry->hop_count, current_entry->link_quality); strcat(output, entry_str); current_entry = current_entry->hh.next; } *str_out = output; return true; }
/* Finish the current unit: if it was valid (or spans more than one
 * linked unit) emit the linked-unit list to the log, then release the
 * link/memory hashes and reset the unit's per-run state. */
void unit_end(unit_table_t *unit) {
    struct link_unit_t *lu;
    char buf[10240];

    if (unit->valid == true || HASH_COUNT(unit->link_unit) > 1) {
        bzero(buf, 10240);
        // emit linked unit lists;
        if (unit->link_unit != NULL) {
            sprintf(buf, "type=unit list=\"");
            for (lu = unit->link_unit; lu != NULL; lu = lu->hh.next) {
                sprintf(buf + strlen(buf), "%d-%d,", lu->id.tid, lu->id.unitid);
            }
            sprintf(buf + strlen(buf), "\" tid=%d \n", unit->tid);
            emit_log(unit, buf);
        }
    }

    // Drop both hashes, then reset per-unit bookkeeping for the next run.
    delete_unit_hash(unit->link_unit, unit->mem_unit);
    unit->link_unit = NULL;
    unit->mem_unit = NULL;
    unit->valid = false;
    unit->r_addr = 0;
    unit->w_addr = 0;
    unit->unitid++;
}
int draw_particles(void) { static Vec pointv[512]; static size_t pointc = ARRLEN(pointv, Vec); double z = zoom(); particle_t* p = particles; int i = 0; size_t count = HASH_COUNT(particles); while(i < count) { for(; p != NULL && i < pointc; p = p->hh.next, i++) { cpVect pos = cpBodyGetPos(p->body); if(fabs(pos.x) <= z / 2 && fabs(pos.y) <= z / 2) { // rough check if position is inside screen borders // TODO: possibly make it separate function pointv[i] = cpv2vec(pos); } } if(draw_points(pointv, i % pointc)) { return -1; } } return 0; }
int pkg_delete_files(struct pkg *pkg, unsigned force) /* force: 0 ... be careful and vocal about it. * 1 ... remove files without bothering about checksums. * 2 ... like 1, but remain silent if removal fails. */ { struct pkg_file *file = NULL; int nfiles, cur_file = 0; nfiles = HASH_COUNT(pkg->files); if (nfiles == 0) return (EPKG_OK); pkg_emit_delete_files_begin(pkg); pkg_emit_progress_start(NULL); while (pkg_files(pkg, &file) == EPKG_OK) { pkg_emit_progress_tick(cur_file++, nfiles); if (file->keep == 1) continue; pkg_delete_file(pkg, file, force); } pkg_emit_progress_tick(nfiles, nfiles); pkg_emit_delete_files_finished(pkg); return (EPKG_OK); }
/* Walk down the tree printing words while each level holds exactly one
 * node.  Returns NULL when the whole chain was flat (fully dumped), or
 * the first node whose level has more than one entry. */
static struct node *try_flat_dump(struct node *tree) {
	struct node *cur;

	for (cur = tree; cur != NULL; cur = cur->childs) {
		// A level with several entries cannot be dumped flat; stop here.
		if (HASH_COUNT(cur) != 1) {
			printf("\n");
			return cur;
		}
		printf("%s ", cur->word);
		if (cur->childs == NULL) {
			printf("\n");
			return NULL;
		}
	}
	printf("\n");
	return NULL;
}
/* Return how many nodes the graph currently holds.
 * Aborts the process if graph is NULL. */
uint32_t get_num_nodes(Graph *graph){
    if (graph != NULL) {
        return (uint32_t)HASH_COUNT(graph->nodes);
    }
    puts("Error: get_node graph given is null.");
    exit(1);
}
/* Print every registered bot as "name: description", or a placeholder
 * message when the bot hash is empty.
 * NOTE(review): function is truncated here — its closing brace lies
 * outside this chunk. */
void print_bot_list(void)
{
	struct bot *b, *tmp;
	if (HASH_COUNT(bots) == 0)
		printf("No bots!\n");
	HASH_ITER(hh, bots, b, tmp) {
		printf("%s: %s\n", b->name, b->desc);
	}
int main (int argc, char *argv[]) { char in[10]; int id = 1; User *s; unsigned num_users; while (1) { printf ("1. add user\n"); printf ("2. find user\n"); printf ("3. delete user\n"); printf ("4. delete all users\n"); printf ("5. sort items by name\n"); printf ("6. sort items by id\n"); printf ("7. print users\n"); printf ("8. count users\n"); gets (in); switch (atoi (in)) { case 1: printf ("name?\n"); add_user (id++, gets (in)); break; case 2: printf ("id?\n"); s = find_user (atoi (gets (in))); printf ("user: %s\n", s ? s->name : "unknown"); break; case 3: printf ("id?\n"); s = find_user (atoi (gets (in))); if (s) delete_user (s); else printf ("id unknown\n"); break; case 4: delete_all (); break; case 5: sort_by_name (); break; case 6: sort_by_id (); break; case 7: print_users (); break; case 8: num_users = HASH_COUNT (users); printf ("there are %u users\n", num_users); break; } } }
/* Overlap coefficient between two strings: |A ∩ B| / min(|A|, |B|),
 * where A and B are the sets of unique tokens produced by the supplied
 * tokenizer.  The intersection size is derived by inclusion-exclusion
 * from the merged token set. */
const float overlap_coefficient_similarity_custom(const char *str1, const char *str2, std_tokenizer_t *tokenizer) {
    hash_token_t *tokens1 = tokenizer->tok_uq_hash_func(str1, tokenizer->delimiters);
    hash_token_t *tokens2 = tokenizer->tok_uq_hash_func(str2, tokenizer->delimiters);
    hash_token_t *merged = merge_tokens(tokens1, tokens2);

    unsigned int n1 = HASH_COUNT(tokens1);
    unsigned int n2 = HASH_COUNT(tokens2);
    unsigned int n_union = HASH_COUNT(merged);
    // |A ∩ B| = |A| + |B| - |A ∪ B|
    unsigned int n_common = (n1 + n2) - n_union;

    const float result = ((float) n_common) / ((float) MIN((float)n1, (float)n2));

    hash_token_free(tokens1);
    hash_token_free(tokens2);
    hash_token_free(merged);
    return result;
}
/* Cosine similarity between two strings over unique-token sets:
 * |A ∩ B| / (sqrt(|A|) * sqrt(|B|)), with the intersection size derived
 * by inclusion-exclusion from the merged token set. */
const float cosine_similarity_custom(const char *str1, const char *str2, std_tokenizer_t *tokenizer) {
    hash_token_t *tokens1 = tokenizer->tok_uq_hash_func(str1, tokenizer->delimiters);
    hash_token_t *tokens2 = tokenizer->tok_uq_hash_func(str2, tokenizer->delimiters);
    hash_token_t *merged = merge_tokens(tokens1, tokens2);

    unsigned int n1 = HASH_COUNT(tokens1);
    unsigned int n2 = HASH_COUNT(tokens2);
    unsigned int n_union = HASH_COUNT(merged);
    // |A ∩ B| = |A| + |B| - |A ∪ B|
    unsigned int n_common = (n1 + n2) - n_union;

    const float result =
        ((float)n_common / (powf((float)n1, (float)0.5) * powf((float)n2, (float)0.5)));

    hash_token_free(tokens1);
    hash_token_free(tokens2);
    hash_token_free(merged);
    return result;
}
/* Main child-monitoring loop step: processes already-exited children and
 * pending processes, cooperating with a signal handler via
 * sigsetjmp/longjmp.  Returns -1 once `terminated` is set, 0 otherwise.
 * NOTE(review): statement order here is signal-sensitive — pm_can_jump
 * gates whether the handler may longjmp back to saved_jump_buf. */
int pm_next_loop(void (*child_changed_status)(process_struct *ps)) {
    sigsetjmp(saved_jump_buf, 1); /* re-entry point for the signal handler's longjmp */
    pm_can_jump = 0;              /* handler must not jump while we drain children */

    while (!terminated && (HASH_COUNT(exited_children) > 0 || signaled))
        pm_check_children(child_changed_status, terminated);

    pm_check_pending_processes();
    // Check for pending signals arrived while we were in the signal handler
    pm_can_jump = 1; /* safe again: handler may longjmp from here on */

    if (terminated)
        return -1;
    else
        return 0;
}
int transfer(char *host, char *port) { int sfd; // server socket fileop_t *f; // contains all operations for a file sfd = inetConnect(host, port, SOCK_STREAM); if (sfd == -1) { errnoMsg(LOG_ERR, "Unable to connect to the server."); return -1; } // initiate sync (sync-id, resource, number of files) errMsg(LOG_INFO, "Number of files: %d", HASH_COUNT(files)); switch (initiateSync(sfd, HASH_COUNT(files))) { case -1: // error return -1; case -2: // there's nothing to synchronize return 0; default: break; } // iterate over files in correct order HASH_SORT(files, sortByOrder); for (f = files; f != NULL; f = (fileop_t*) (f->hh.next)) { if (transferFile(sfd, f) == -1) { errMsg(LOG_ERR, "Transfer of file %s has failed.", f->relpath); return -1; } } // wait for ACK SyncFinish *conf; conf = (SyncFinish *) recvMessage(sfd, SyncFinishType, NULL); if (conf == NULL) { errMsg(LOG_ERR, "Could not get transfer confirmation."); return -1; } return 0; }
/**
 * WARNING: This function actually deletes a mobile from the world.
 *
 * Visible steps: refuse to delete the last prototype, extract every live
 * instance from the world, remove the prototype from the hash table, save
 * the mob index/file, and scrub building spawn/interaction lists.
 * NOTE(review): function is truncated here — several declared iterators
 * (desc, glb, rmt, sect, crop) are presumably used in the remainder,
 * which lies outside this chunk.
 *
 * @param char_data *ch The person doing the deleting.
 * @param mob_vnum vnum The vnum to delete.
 */
void olc_delete_mobile(char_data *ch, mob_vnum vnum) {
	void extract_pending_chars();
	void remove_mobile_from_table(char_data *mob);

	char_data *proto, *mob_iter, *next_mob;
	descriptor_data *desc;
	struct global_data *glb, *next_glb;
	room_template *rmt, *next_rmt;
	sector_data *sect, *next_sect;
	crop_data *crop, *next_crop;
	bld_data *bld, *next_bld;
	bool found;

	if (!(proto = mob_proto(vnum))) {
		msg_to_char(ch, "There is no such mobile %d.\r\n", vnum);
		return;
	}

	// never empty the table entirely
	if (HASH_COUNT(mobile_table) <= 1) {
		msg_to_char(ch, "You can't delete the last mob.\r\n");
		return;
	}

	// remove mobs from the list: DO THIS FIRST
	for (mob_iter = character_list; mob_iter; mob_iter = next_mob) {
		next_mob = mob_iter->next;
		if (IS_NPC(mob_iter)) {
			if (GET_MOB_VNUM(mob_iter) == vnum) {
				// this is the removed mob
				act("$n has been deleted.", FALSE, mob_iter, NULL, NULL, TO_ROOM);
				extract_char(mob_iter);
			}
		}
	}

	// their data will already be free, so we need to clear them out now
	extract_pending_chars();

	// pull from hash ONLY after removing from the world
	remove_mobile_from_table(proto);

	// save mob index and mob file now so there's no trouble later
	save_index(DB_BOOT_MOB);
	save_library_file_for_vnum(DB_BOOT_MOB, vnum);

	// update buildings: drop this vnum from spawn and interaction lists
	HASH_ITER(hh, building_table, bld, next_bld) {
		found = delete_mob_from_spawn_list(&GET_BLD_SPAWNS(bld), vnum);
		found |= delete_from_interaction_list(&GET_BLD_INTERACTIONS(bld), TYPE_MOB, vnum);
		if (found) {
			save_library_file_for_vnum(DB_BOOT_BLD, GET_BLD_VNUM(bld));
		}
	}
dessert_per_result_t send_tc(void *data, struct timeval *scheduled, struct timeval *interval) { pthread_rwlock_wrlock(&pp_rwlock); if (HASH_COUNT(dir_neighbors_head) == 0) { return 0; } dessert_msg_t *tc; dessert_msg_new(&tc); tc->ttl = TTL_MAX; tc->u8 = ++tc_seq_nr; // delete old entries from NH list node_neighbors_t *dir_neigh = dir_neighbors_head; while (dir_neigh) { if (dir_neigh->entry_age-- == 0) { node_neighbors_t* el_to_delete = dir_neigh; HASH_DEL(dir_neighbors_head, el_to_delete); free(el_to_delete); } dir_neigh = dir_neigh->hh.next; } // add TC extension dessert_ext_t *ext; uint8_t ext_size = 1 + ((sizeof(node_neighbors_t)- sizeof(dir_neighbors_head->hh)) * HASH_COUNT(dir_neighbors_head)); dessert_msg_addext(tc, &ext, LSR_EXT_TC, ext_size); void* tc_ext = ext->data; memcpy(tc_ext, &(ext_size), 1); tc_ext++; // copy NH list into extension dir_neigh = dir_neighbors_head; while (dir_neigh) { memcpy(tc_ext, dir_neigh->addr, ETH_ALEN); tc_ext += ETH_ALEN; memcpy(tc_ext, &(dir_neigh->entry_age), 1); tc_ext++; memcpy(tc_ext, &(dir_neigh->weight), 1); tc_ext++; dir_neigh = dir_neigh->hh.next; } // add l2.5 header dessert_msg_addext(tc, &ext, DESSERT_EXT_ETH, ETHER_HDR_LEN); struct ether_header* l25h = (struct ether_header*) ext->data; memcpy(l25h->ether_shost, dessert_l25_defsrc, ETH_ALEN); memcpy(l25h->ether_dhost, ether_broadcast, ETH_ALEN); dessert_meshsend_fast(tc, NULL); dessert_msg_destroy(tc); pthread_rwlock_unlock(&pp_rwlock); return 0; }
/* Attach a tag of the given type to a session, and stop saving the
 * session if the tag appears in the configured dontSaveTags set. */
void moloch_session_add_tag_type(MolochSession_t *session, int tagtype, const char *tag) {
    moloch_session_incr_outstanding(session);
    moloch_db_get_tag(session, tagtype, tag, moloch_session_get_tag_cb);

    // Only consult dontSaveTags when saving hasn't already been stopped
    // and the set is non-empty.
    if (session->stopSaving != 0 || HASH_COUNT(s_, config.dontSaveTags) == 0)
        return;

    MolochString_t *match;
    HASH_FIND(s_, config.dontSaveTags, tag, match);
    if (match) {
        session->stopSaving = (long)match->uw;
    }
}
/* Remove every cached texture whose id falls in [idmin, idmax):
 * collect the GL texture names, drop the hash entries, then delete the
 * GL textures in one call. */
void remove_tex(unsigned int idmin, unsigned int idmax) {
	texlist *entry, *tmp;
	unsigned int count = 0;
	GLuint *ids = (GLuint*)malloc(HASH_COUNT(list) * sizeof(GLuint));

	HASH_ITER(hh, list, entry, tmp) {
		if (entry->id < idmin || entry->id >= idmax)
			continue;
		ids[count++] = entry->tex_id;
		HASH_DEL(list, entry);
		free(entry);
	}
	glDeleteTextures(count, ids);
	free(ids);
#ifdef LOG_TEXTUREMEM
	if (log_cb) log_cb(RETRO_LOG_DEBUG, "RMVTEX nbtex is now %d (%06x - %06x)\n", HASH_COUNT(list), idmin, idmax);
#endif
}
/* Join every string in the hash set into one newly allocated string,
 * separated by `delim`.  Returns NULL for a NULL set and "" for an empty
 * one; the caller frees the result.
 * NOTE(review): function is truncated here — only the length-measuring
 * pass is visible; the allocation/concatenation pass lies outside this
 * chunk. */
FCITX_EXPORT_API
char* fcitx_utils_string_hash_set_join(FcitxStringHashSet* sset, char delim)
{
    if (!sset)
        return NULL;
    if (HASH_COUNT(sset) == 0)
        return strdup("");
    /* measure: each name plus one byte for its delimiter/terminator */
    size_t len = 0;
    HASH_FOREACH(string, sset, FcitxStringHashSet) {
        len += strlen(string->name) + 1;
    }
/* Attach a tag to a session (both as a DB tag and a string field), and
 * stop saving the session if the tag is in the dontSaveTags set. */
void moloch_session_add_tag(MolochSession_t *session, const char *tag) {
    moloch_session_incr_outstanding(session);
    moloch_db_get_tag(session, config.tagsField, tag, moloch_session_get_tag_cb);
    moloch_field_string_add(config.tagsStringField, session, tag, -1, TRUE);

    // Only consult dontSaveTags when saving hasn't already been stopped
    // and the set is non-empty.
    if (session->stopSaving != 0 || HASH_COUNT(s_, config.dontSaveTags) == 0)
        return;

    MolochString_t *match;
    HASH_FIND(s_, config.dontSaveTags, tag, match);
    if (match) {
        session->stopSaving = (int)(long)match->uw;
    }
}
/* Look `term` up in the lexicon, load its posting list from the index
 * file, and print the term, its document count, and each matching
 * document with its term count.
 * NOTE(review): function is truncated here — its closing brace lies
 * outside this chunk; the HASH_ITER entries are also never freed in the
 * visible portion. */
void query(struct trie *lexicon, FILE *index, struct mapping **map, char *term) {
	/* NOTE(review): this single-element malloc looks suspicious — uthash
	 * heads are conventionally initialized to NULL, and index_find()
	 * appears to overwrite results via &results, leaking this block;
	 * confirm against index_find's contract. */
	struct token *results = malloc(sizeof(struct token));
	struct token *s, *tmp;
	uint32_t ptr = lexicon_find(lexicon, term);
	fseek(index, ptr, SEEK_SET);
	index_find(index, ptr, &results);
	printf("%s\n", term);
	printf("%u\n", HASH_COUNT(results));
	HASH_ITER(hh, results, s, tmp) {
		printf("%s, %u\n", unmap_document(map, s->id), s->count);
	}
/* Log per-type WISE statistics: lookup/cache/request counters plus the
 * current item hash and list sizes for each of the four types. */
void wise_print_stats() {
    int type;

    for (type = 0; type < 4; type++) {
        LOG("%8s lookups:%7d cache:%7d requests:%7d inprogress:%7d fail:%7d hash:%7d list:%7d",
            wiseStrings[type],
            stats[type][0],
            stats[type][1],
            stats[type][2],
            stats[type][3],
            stats[type][4],
            HASH_COUNT(wih_, itemHash[type]),
            DLL_COUNT(wil_, &itemList[type]));
    }
}
/*
 * uthash demo: insert ten users keyed by id and report the hash size.
 * Idiom cleanup: malloc's return is no longer cast (unnecessary in C)
 * and the size is taken from the object (`sizeof *user`) rather than
 * repeating the type name.
 */
int main(int argc,char *argv[]) {
    int i;
    example_user_t *user, *users = NULL;

    /* create elements */
    for (i = 0; i < 10; i++) {
        user = malloc(sizeof *user);
        if (user == NULL)
            exit(-1);
        user->id = i;
        user->cookie = i * i;
        HASH_ADD_INT(users, id, user);
    }

    printf("hash contains %d items\n", HASH_COUNT(users));
    return 0;
}
/* Consolidate a GPOS mark array against its coverage: keep only named
 * glyphs with a present anchor and a valid class, drop duplicates, then
 * rewrite the coverage and mark records in gid order.
 * NOTE(review): function is truncated here — its closing brace lies
 * outside this chunk. */
static void consolidateMarkArray(caryll_font *font, table_otl *table, sds lookupName, otl_coverage *marks, otl_mark_array *markArray, uint16_t classCount) {
	mark_hash *hm = NULL;
	for (uint16_t k = 0; k < marks->numGlyphs; k++) {
		if (marks->glyphs[k].name) {
			mark_hash *s = NULL;
			HASH_FIND_INT(hm, &(marks->glyphs[k].gid), s);
			// accept only first occurrence with a usable anchor/class
			if (!s && markArray->records[k].anchor.present && markArray->records[k].markClass < classCount) {
				NEW(s);
				s->gid = marks->glyphs[k].gid;
				s->name = marks->glyphs[k].name;
				s->markrec = markArray->records[k];
				HASH_ADD_INT(hm, gid, s);
			} else {
				fprintf(stderr, "[Consolidate] Ignored invalid or "
				                "double-mapping mark definition for /%s in "
				                "lookup %s.\n",
				        marks->glyphs[k].name, lookupName);
			}
		}
	}
	HASH_SORT(hm, mark_by_gid);
	// shrink both structures to the surviving entries
	marks->numGlyphs = HASH_COUNT(hm);
	markArray->markCount = HASH_COUNT(hm);
	mark_hash *s, *tmp;
	uint16_t k = 0;
	HASH_ITER(hh, hm, s, tmp) {
		marks->glyphs[k].gid = s->gid;
		marks->glyphs[k].name = s->name;
		markArray->records[k] = s->markrec;
		k++;
		HASH_DEL(hm, s);
		free(s);
	}
/* Client Mode */ void *connectToPeer(void *temp) { struct sockaddr_in remote_addr; int new_fd; char *buffer = malloc(MAXBUFFSIZE), *buf1 = malloc(MAXBUFFSIZE), *buf2 = malloc(MAXBUFFSIZE); struct peerList *peer = (struct peerList *)temp; if (!add_peer(peer) && (peer->net_fd)) return NULL; /* Create TCP Socket */ if ((new_fd = socket(AF_INET, SOCK_STREAM, 0)) < 0) { perror("could not create socket"); exit(1); } memset((char *)&remote_addr, 0, sizeof(remote_addr)); remote_addr.sin_family = AF_INET; remote_addr.sin_port = htons(peer->listenPort); inet_aton((char *)inet_ntoa(peer->listenIP), &remote_addr.sin_addr); sprintf(buf1, "%s %d", inet_ntoa(local_info->listenIP), local_info->listenPort); sprintf(buf2, "%s %d", inet_ntoa(peer->listenIP), peer->listenPort); if (debug) printf("COMPARING %s --- %s\n", buf1, buf2); if (!(strcmp(buf1, buf2))) return NULL; puts("Client Mode:"); printf("NEW PEER: Connecting to %s:%d\n", inet_ntoa(remote_addr.sin_addr), ntohs(remote_addr.sin_port)); /* Connect to server */ if ((connect(new_fd, (struct sockaddr *)&remote_addr, sizeof(remote_addr))) < 0) { printf("NEW PEER: Peer Removed %s:%d: Failed to connect\n", inet_ntoa(peer->listenIP), peer->listenPort); if (debug) printf("errno: %d\n", errno); remove_peer(peer); } else { /* Create single link state packet */ strcpy(buffer, peer->tapDevice); peer->net_fd = new_fd; printf("NEW PEER: Connected to server %s:%d - %d\n", inet_ntoa(peer->listenIP), peer->listenPort, peer->net_fd); // sprintf(buffer, "0xabcd 2048%s blah blah", send_peerList(peer)); // send(peer->net_fd, buffer, strlen(buffer), 0); send_singleLinkStatePacket(peer); lsPacket->neighbors = HASH_COUNT(peers); if (debug) print_linkStatePacket(); } if (debug) puts("Leaving connectToPeer"); return NULL; }
/* Load the whole node/arc graph from the database file into memory and
 * return it.  Aborts the process on a NULL db or a duplicate node id.
 * BUG FIX: a short/failed read (count == 0) used to `continue` without
 * releasing the just-allocated NodeData, leaking one record per
 * iteration near EOF. */
Graph *load_graph(Database *db){
	if(db == NULL){
		puts("Error: load graph failed. Db is null.");
		exit(1);
	}
	Graph *graph = _new_graph();
	graph->db = db;
	graph->nodes = NULL;
	int i;
	uint64_t count = 0;
	uint64_t total = 0;
	NodeData *node_data;
	ArcData *arc_data;
	Node *node;
	Arc *arc;
	ArcNode *arcnode;

	puts("Loading graph into memory...");
	fseek(db->fp, 0L, SEEK_SET);
	while(!feof(db->fp)){
		node_data = malloc(sizeof(NodeData));
		count = read_database(node_data, sizeof(NodeData), db);
		total += count;
		if(count == 0){
			free(node_data); // nothing was read into it
			continue;
		}
		// duplicate ids indicate a corrupt database
		HASH_FIND_INT(graph->nodes, &(node_data->id), node);
		if(node != NULL){
			printf("Error: node %i already in tree.\n", node_data->id);
			exit(1);
		}
		// NOTE(review): node_data ownership appears to pass to _new_node — confirm
		node = _new_node(node_data, graph);
		HASH_ADD_INT(graph->nodes, data->id, node);
		// each node record is followed by its arc records
		if(node_data->num_arcnodes > 0){
			for(i=0; i<node_data->num_arcnodes; i++){
				arc_data = malloc(sizeof(ArcData));
				total += read_database((void *)arc_data, sizeof(ArcData), db);
				arc = _new_arc(arc_data);
				arcnode = _new_arcnode(arc);
				node->arcnodes[i] = arcnode;
			}
		}
	}
	printf("Loaded %i nodes.\n", (int)HASH_COUNT(graph->nodes));
	printf("Read %llu bytes from database.\n", (long long unsigned int)total);
	return graph;
}
/**
 * WARNING: This function actually deletes a crop.
 *
 * Visible steps: refuse to delete the last crop, remove the prototype
 * from the hash table, resolve the climate's default base sector, save
 * the crop index/file, then convert every world tile using this crop to
 * that base sector.
 * NOTE(review): function is truncated here — obj/desc iterators declared
 * below are presumably used in the remainder outside this chunk.
 *
 * @param char_data *ch The person doing the deleting.
 * @param crop_vnum vnum The vnum to delete.
 */
void olc_delete_crop(char_data *ch, crop_vnum vnum) {
	void remove_crop_from_table(crop_data *crop);
	extern const sector_vnum climate_default_sector[NUM_CLIMATES];

	obj_data *obj, *next_obj;
	descriptor_data *desc;
	struct map_data *map;
	room_data *room;
	crop_data *crop;
	sector_data *base = NULL;
	int count;

	if (!(crop = crop_proto(vnum))) {
		msg_to_char(ch, "There is no such crop %d.\r\n", vnum);
		return;
	}

	// never empty the table entirely
	if (HASH_COUNT(crop_table) <= 1) {
		msg_to_char(ch, "You can't delete the last crop.\r\n");
		return;
	}

	// remove it from the hash table first
	remove_crop_from_table(crop);

	// save base sect for later
	base = sector_proto(climate_default_sector[GET_CROP_CLIMATE(crop)]);

	// save index and crop file now
	save_index(DB_BOOT_CROP);
	save_library_file_for_vnum(DB_BOOT_CROP, vnum);

	// update world
	count = 0;
	LL_FOREACH(land_map, map) {
		room = real_real_room(map->vnum);
		if (map->crop_type == crop || (room && ROOM_CROP(room) == crop)) {
			if (!room) {
				room = real_room(map->vnum);
			}
			set_crop_type(room, NULL); // remove it explicitly
			change_terrain(room, GET_SECT_VNUM(base));
			++count;
		}
	}