/* * Initialize data structures */ int init_trusted(void) { if (db_mode == ENABLE_CACHE) { hash_table_1 = new_hash_table(); if (!hash_table_1) return -1; hash_table_2 = new_hash_table(); if (!hash_table_2) goto error; hash_table = (struct trusted_list ***)shm_malloc(sizeof(struct trusted_list **)); if (!hash_table) goto error; *hash_table = hash_table_1; if (reload_trusted_table() == -1) { LOG(L_CRIT, "init_trusted(): Reload of trusted table failed\n"); goto error; } } return 0; error: clean_trusted(); return -1; }
char lisp_put_symbol(const char* name, void* atom) { if(!lisp_symtbl) lisp_symtbl=new_hash_table(LPSYMTBLSIZE,hash_symbol); return hash_table_put(lisp_symtbl,name,(void*)atom); }
/**
 * @name new_search
 *
 * Create and initialize a new search record.
 */
SEARCH_RECORD *Wordrec::new_search(CHUNKS_RECORD *chunks_record,
                                   int num_joints,
                                   BLOB_CHOICE_LIST_VECTOR *best_char_choices,
                                   WERD_CHOICE *best_choice,
                                   WERD_CHOICE *raw_choice,
                                   STATE *state) {
  SEARCH_RECORD *this_search;

  // memalloc does not zero the record, so every field referenced below
  // must be explicitly assigned first.  (chunks_record is currently unused
  // in this function.)
  this_search = (SEARCH_RECORD *) memalloc (sizeof (SEARCH_RECORD));

  // Priority queue of states still to expand, sized from the configured
  // number of segmentation states.
  this_search->open_states = MakeHeap (wordrec_num_seg_states * 20);
  // Set of states already expanded.
  this_search->closed_states = new_hash_table();

  if (state)
    this_search->this_state = new_state (state);
  else
    // NOTE(review): when state is NULL, this_state is left uninitialized
    // here yet is still passed to new_state() twice below — confirm that
    // callers can never reach this branch.
    cprintf ("error: bad initial state in new_search\n");

  this_search->first_state = new_state (this_search->this_state);
  this_search->best_state = new_state (this_search->this_state);

  this_search->best_choice = best_choice;
  this_search->raw_choice = raw_choice;
  this_search->best_char_choices = best_char_choices;

  this_search->num_joints = num_joints;
  this_search->num_states = 0;
  this_search->before_best = 0;
  this_search->segcost_bias = 0;

  return (this_search);
}
/* carrega todos os pares CHAVE=VALOR do arquivo de tradução * numa hash_table, para eliminar a necessidade de abrir varias * vezes o arquivo. */ hash_table *load_translation() { char *filename; hash_table *trans = NULL; FILE *fp; char buffer[256]; int buf_size; /* obter a localização do arquivo de tradução no ldc.conf */ filename = TRANSLATION_FILE; if (filename == NULL) return NULL; if ((fp = fopen(filename, "r")) == NULL) return NULL; /* criar hash_table para armazenar as tuplas de tradução */ trans = new_hash_table(); buf_size = sizeof(buffer)/sizeof(buffer[0]); while (fgets(buffer, buf_size, fp) != NULL) { struct tuple *tmp = parse_tuple(buffer); if (tmp != NULL) add_hash_info(trans, tmp->key, tmp->value); free_tuple (&tmp); } fclose(fp); return trans; }
char lisp_get_symbol(const char* name, void** atom) { if(!lisp_symtbl) lisp_symtbl=new_hash_table(LPSYMTBLSIZE,hash_symbol); *atom=hash_table_get(lisp_symtbl,name); return *atom?1:0; }
/*
 * Allocate a hash table of the default size and clear its bucket array.
 * Returns NULL if the underlying allocation fails.
 */
struct hash_table *new_hash ()
{
    struct hash_table *table;

    table = new_hash_table (DEFAULT_HASH_SIZE, "new_hash");
    if (table)
        memset (&table -> buckets [0], 0,
                DEFAULT_HASH_SIZE * sizeof (struct hash_bucket *));
    return table;
}
//create a new lexicon FRISO_API friso_dic_t friso_dic_new() { register uint_t t; friso_dic_t dic = ( friso_dic_t ) FRISO_CALLOC( sizeof( friso_hash_t ), __FRISO_LEXICON_LENGTH__ ); if ( dic == NULL ) { ___ALLOCATION_ERROR___ } for ( t = 0; t < __FRISO_LEXICON_LENGTH__; t++ ) { dic[t] = new_hash_table(); } return dic; }
/*
 * Program entry point: parse the given pre-parsed input file, build the
 * authors dictionary and the article graph, then start the interactive
 * interface.
 * Returns 0 on success, 1 on usage error or when the input file cannot
 * be opened.
 */
int main(int argc, char **argv)
{
    FILE *inputfile;

    if (argc != 2) {
        printf("USAGE: %s input_parsed_file\n", argv[0]);
        return 1;
    }

    /* Bug fix: the original never checked fopen(); a missing file led to
     * read_file(NULL) and fclose(NULL). */
    inputfile = fopen(argv[1], "r");
    if (inputfile == NULL) {
        printf("USAGE: %s input_parsed_file\n", argv[0]);
        return 1;
    }

    authors_dict = new_hash_table(AUTHORS_HASH_DIM, author_node);
    artcl_graph = new_graph(article_node);

    read_file(inputfile);
    fclose(inputfile);

    interface();
    return 0;
}
char lisp_remove_symbol(const char* name) { if(!lisp_symtbl) lisp_symtbl=new_hash_table(LPSYMTBLSIZE,hash_symbol); return hash_table_remove(lisp_symtbl,name)?1:0; }
/*
 * Initialize data structures
 *
 * Validates the DB configuration, and — when caching is enabled — connects
 * to the database, checks the trusted table's schema version, allocates the
 * double-buffered hash tables plus the shared-memory pointer to the active
 * one, and performs the initial load.  Returns 0 on success (also when the
 * feature is disabled), -1 on any failure.
 */
int init_trusted(void)
{
    /* Check if hash table needs to be loaded from trusted table */
    if (!db_url.s) {
        /* no DB configured: the feature is simply disabled, not an error */
        LM_INFO("db_url parameter of permissions module not set, "
            "disabling allow_trusted\n");
        return 0;
    } else {
        if (db_bind_mod(&db_url, &perm_dbf) < 0) {
            LM_ERR("load a database support module\n");
            return -1;
        }

        if (!DB_CAPABILITY(perm_dbf, DB_CAP_QUERY)) {
            LM_ERR("database module does not implement 'query' function\n");
            return -1;
        }
    }

    /* reset globals so the error path below can tell what was allocated */
    hash_table_1 = hash_table_2 = 0;
    hash_table = 0;

    if (db_mode == ENABLE_CACHE) {
        db_handle = perm_dbf.init(&db_url);
        if (!db_handle) {
            LM_ERR("unable to connect database\n");
            return -1;
        }

        if(db_check_table_version(&perm_dbf, db_handle, &trusted_table, TABLE_VERSION) < 0) {
            LM_ERR("error during table version check.\n");
            perm_dbf.close(db_handle);
            return -1;
        }

        /* double-buffered tables: one active, one spare for reloads */
        hash_table_1 = new_hash_table();
        if (!hash_table_1) return -1;

        hash_table_2 = new_hash_table();
        if (!hash_table_2) goto error;

        /* shared-memory cell pointing at the currently active table */
        hash_table = (struct trusted_list ***)shm_malloc
            (sizeof(struct trusted_list **));
        if (!hash_table) goto error;

        *hash_table = hash_table_1;

        if (reload_trusted_table() == -1) {
            LM_CRIT("reload of trusted table failed\n");
            goto error;
        }

        /* the connection is only needed for the initial load */
        perm_dbf.close(db_handle);
        db_handle = 0;
    }

    return 0;

error:
    /* release whatever was successfully allocated, in allocation order */
    if (hash_table_1) {
        free_hash_table(hash_table_1);
        hash_table_1 = 0;
    }
    if (hash_table_2) {
        free_hash_table(hash_table_2);
        hash_table_2 = 0;
    }
    if (hash_table) {
        shm_free(hash_table);
        hash_table = 0;
    }
    perm_dbf.close(db_handle);
    db_handle = 0;
    return -1;
}
/*
 * RebuildTfst entry point: rebuilds a text automaton (.tfst/.tind pair)
 * from per-sentence .grf graphs found next to it, regenerates the tag
 * statistics files, and replaces the original files after making a backup.
 * Returns SUCCESS_RETURN_CODE, USAGE_ERROR_CODE or DEFAULT_ERROR_CODE.
 */
int main_RebuildTfst(int argc,char* const argv[]) {
if (argc==1) {
    usage();
    return SUCCESS_RETURN_CODE;
}

VersatileEncodingConfig vec=VEC_DEFAULT;
int val, index=-1;
bool only_verify_arguments = false;
UnitexGetOpt options;
int save_statistics=1;
/* command-line option parsing */
while (EOF!=(val=options.parse_long(argc,argv,optstring_RebuildTfst,lopts_RebuildTfst,&index))) {
    switch (val) {
    case 'k': /* input encoding */
              if (options.vars()->optarg[0]=='\0') {
                  error("Empty input_encoding argument\n");
                  return USAGE_ERROR_CODE;
              }
              decode_reading_encoding_parameter(&(vec.mask_encoding_compatibility_input),options.vars()->optarg);
              break;
    case 'q': /* output encoding */
              if (options.vars()->optarg[0]=='\0') {
                  error("Empty output_encoding argument\n");
                  return USAGE_ERROR_CODE;
              }
              decode_writing_encoding_parameter(&(vec.encoding_output),&(vec.bom_output),options.vars()->optarg);
              break;
    case 'S': save_statistics = 0; break;
    case 'V': only_verify_arguments = true; break;
    case 'h': usage(); return SUCCESS_RETURN_CODE;
    case ':': index==-1 ? error("Missing argument for option -%c\n", options.vars()->optopt) :
                          error("Missing argument for option --%s\n", lopts_RebuildTfst[index].name);
              return USAGE_ERROR_CODE;
    case '?': index==-1 ? error("Invalid option -%c\n", options.vars()->optopt) :
                          error("Invalid option --%s\n", options.vars()->optarg);
              return USAGE_ERROR_CODE;
    }
    index=-1;
}

/* exactly one positional argument (the .tfst) is required */
if (options.vars()->optind!=argc-1) {
    error("Invalid arguments: rerun with --help\n");
    return USAGE_ERROR_CODE;
}

if (only_verify_arguments) {
    // freeing all allocated memory
    return SUCCESS_RETURN_CODE;
}

/* derive the .tind file name from the .tfst one */
char input_tfst[FILENAME_MAX];
char input_tind[FILENAME_MAX];
strcpy(input_tfst,argv[options.vars()->optind]);
remove_extension(input_tfst,input_tind);
strcat(input_tind,".tind");

u_printf("Loading %s...\n",input_tfst);
Tfst* tfst = open_text_automaton(&vec,input_tfst);
if (tfst==NULL) {
    error("Unable to load %s automaton\n",input_tfst);
    return DEFAULT_ERROR_CODE;
}

char basedir[FILENAME_MAX];
get_path(input_tfst,basedir);

/* the rebuilt automaton is written to .new.* files first, then swapped in */
char output_tfst[FILENAME_MAX];
sprintf(output_tfst, "%s.new.tfst",input_tfst);
char output_tind[FILENAME_MAX];
sprintf(output_tind, "%s.new.tind",input_tfst);

U_FILE* f_tfst;
if ((f_tfst = u_fopen(&vec,output_tfst,U_WRITE)) == NULL) {
    error("Unable to open %s for writing\n", output_tfst);
    close_text_automaton(tfst);
    return DEFAULT_ERROR_CODE;
}

U_FILE* f_tind;
if ((f_tind = u_fopen(BINARY,output_tind,U_WRITE)) == NULL) {
    u_fclose(f_tfst);
    close_text_automaton(tfst);
    error("Unable to open %s for writing\n", output_tind);
    return DEFAULT_ERROR_CODE;
}

/* We use this hash table to rebuild files tfst_tags_by_freq/alph.txt */
struct hash_table* form_frequencies=new_hash_table((HASH_FUNCTION)hash_unichar,(EQUAL_FUNCTION)u_equal,
        (FREE_FUNCTION)free,NULL,(KEYCOPY_FUNCTION)keycopy);

/* placeholder header: the sentence count is 10 digits wide */
u_fprintf(f_tfst,"%010d\n",tfst->N);
for (int i = 1; i <= tfst->N; i++) {
    if ((i % 100) == 0) {
        u_printf("%d/%d sentences rebuilt...\n", i, tfst->N);
    }
    load_sentence(tfst,i);

    char grfname[FILENAME_MAX];
    sprintf(grfname, "%ssentence%d.grf", basedir, i);
    unichar** tags=NULL;
    int n_tags=-1;
    if (fexists(grfname)) {
        /* If there is a .grf for the current sentence, then we must
         * take it into account */
        if (0==pseudo_main_Grf2Fst2(&vec,grfname,0,NULL,1,1,NULL,NULL,0)) {
            /* We proceed only if the graph compilation was a success */
            char fst2name[FILENAME_MAX];
            sprintf(fst2name, "%ssentence%d.fst2", basedir, i);
            struct FST2_free_info fst2_free;
            Fst2* fst2=load_abstract_fst2(&vec,fst2name,0,&fst2_free);
            af_remove(fst2name);
            /* replace the current sentence automaton by the compiled graph */
            free_SingleGraph(tfst->automaton,NULL);
            tfst->automaton=create_copy_of_fst2_subgraph(fst2,1);
            tags=create_tfst_tags(fst2,&n_tags);
            free_abstract_Fst2(fst2,&fst2_free);
        } else {
            error("Error: %s is not a valid sentence automaton\n",grfname);
        }
    }
    save_current_sentence(tfst,f_tfst,f_tind,tags,n_tags,form_frequencies);
    if (tags!=NULL) {
        /* If necessary, we free the tags we created */
        for (int count_tags=0;count_tags<n_tags;count_tags++) {
            free(tags[count_tags]);
        }
        free(tags);
    }
}
u_printf("Text automaton rebuilt.\n");
u_fclose(f_tind);
u_fclose(f_tfst);
close_text_automaton(tfst);

/* Finally, we save statistics */
if (save_statistics) {
    char tfst_tags_by_freq[FILENAME_MAX];
    char tfst_tags_by_alph[FILENAME_MAX];
    strcpy(tfst_tags_by_freq, basedir);
    strcat(tfst_tags_by_freq, "tfst_tags_by_freq.txt");
    strcpy(tfst_tags_by_alph, basedir);
    strcat(tfst_tags_by_alph, "tfst_tags_by_alph.txt");
    /* NOTE(review): a NULL file here is only reported, then still passed
     * to sort_and_save_tfst_stats — presumably that function tolerates
     * NULL outputs; confirm. */
    U_FILE* f_tfst_tags_by_freq = u_fopen(&vec, tfst_tags_by_freq, U_WRITE);
    if (f_tfst_tags_by_freq == NULL) {
        error("Cannot open %s\n", tfst_tags_by_freq);
    }
    U_FILE* f_tfst_tags_by_alph = u_fopen(&vec, tfst_tags_by_alph, U_WRITE);
    if (f_tfst_tags_by_alph == NULL) {
        error("Cannot open %s\n", tfst_tags_by_alph);
    }
    sort_and_save_tfst_stats(form_frequencies, f_tfst_tags_by_freq, f_tfst_tags_by_alph);
    u_fclose(f_tfst_tags_by_freq);
    u_fclose(f_tfst_tags_by_alph);
}
free_hash_table(form_frequencies);

/* make a backup and replace old automaton with new */
char backup_tfst[FILENAME_MAX];
char backup_tind[FILENAME_MAX];
sprintf(backup_tfst,"%s.bck",input_tfst);
sprintf(backup_tind,"%s.bck",input_tind);
/* We remove the existing backup files, if any */
af_remove(backup_tfst);
af_remove(backup_tind);
af_rename(input_tfst,backup_tfst);
af_rename(input_tind,backup_tind);
af_rename(output_tfst,input_tfst);
af_rename(output_tind,input_tind);
u_printf("\nYou can find a backup of the original files in:\n    %s\nand %s\n",
         backup_tfst,backup_tind);

return SUCCESS_RETURN_CODE;
}
/*
 * Tokenize entry point: splits the given text file into tokens (word mode
 * or char-by-char mode), writing text.cod, tokens.txt, enter.pos plus the
 * statistics files stats.n, tok_by_freq.txt and tok_by_alph.txt into the
 * text's .snt directory.  Returns 0 on success, 1 on recoverable errors
 * (fatal_error aborts for the others).
 */
int main_Tokenize(int argc,char* const argv[]) {
if (argc==1) {
    usage();
    return 0;
}

char alphabet[FILENAME_MAX]="";
char token_file[FILENAME_MAX]="";
Encoding encoding_output = DEFAULT_ENCODING_OUTPUT;
int bom_output = DEFAULT_BOM_OUTPUT;
int mask_encoding_compatibility_input = DEFAULT_MASK_ENCODING_COMPATIBILITY_INPUT;
int val,index=-1;
int mode=NORMAL;
struct OptVars* vars=new_OptVars();
/* command-line option parsing */
while (EOF!=(val=getopt_long_TS(argc,argv,optstring_Tokenize,lopts_Tokenize,&index,vars))) {
    switch(val) {
    case 'a': /* alphabet file */
              if (vars->optarg[0]=='\0') {
                  fatal_error("You must specify a non empty alphabet file name\n");
              }
              strcpy(alphabet,vars->optarg);
              break;
    case 'c': mode=CHAR_BY_CHAR; break;
    case 'w': mode=NORMAL; break;
    case 't': /* pre-existing token file to preload */
              if (vars->optarg[0]=='\0') {
                  fatal_error("You must specify a non empty token file name\n");
              }
              strcpy(token_file,vars->optarg);
              break;
    case 'k': if (vars->optarg[0]=='\0') {
                  fatal_error("Empty input_encoding argument\n");
              }
              decode_reading_encoding_parameter(&mask_encoding_compatibility_input,vars->optarg);
              break;
    case 'q': if (vars->optarg[0]=='\0') {
                  fatal_error("Empty output_encoding argument\n");
              }
              decode_writing_encoding_parameter(&encoding_output,&bom_output,vars->optarg);
              break;
    case 'h': usage(); return 0;
    case ':': /* NOTE(review): no break here — this relies on fatal_error
               * not returning; otherwise control falls into case '?'. */
              if (index==-1) fatal_error("Missing argument for option -%c\n",vars->optopt);
              else fatal_error("Missing argument for option --%s\n",lopts_Tokenize[index].name);
    case '?': if (index==-1) fatal_error("Invalid option -%c\n",vars->optopt);
              else fatal_error("Invalid option --%s\n",vars->optarg);
              break;
    }
    index=-1;
}

/* exactly one positional argument (the text file) is required */
if (vars->optind!=argc-1) {
    fatal_error("Invalid arguments: rerun with --help\n");
}

U_FILE* text;
U_FILE* out;
U_FILE* output;
U_FILE* enter;
char tokens_txt[FILENAME_MAX];
char text_cod[FILENAME_MAX];
char enter_pos[FILENAME_MAX];
Alphabet* alph=NULL;

/* build output paths inside the text's .snt directory */
get_snt_path(argv[vars->optind],text_cod);
strcat(text_cod,"text.cod");
get_snt_path(argv[vars->optind],tokens_txt);
strcat(tokens_txt,"tokens.txt");
get_snt_path(argv[vars->optind],enter_pos);
strcat(enter_pos,"enter.pos");

text=u_fopen_existing_versatile_encoding(mask_encoding_compatibility_input,argv[vars->optind],U_READ);
if (text==NULL) {
    fatal_error("Cannot open text file %s\n",argv[vars->optind]);
}

if (alphabet[0]!='\0') {
    alph=load_alphabet(alphabet);
    if (alph==NULL) {
        error("Cannot load alphabet file %s\n",alphabet);
        u_fclose(text);
        return 1;
    }
}

out=u_fopen(BINARY,text_cod,U_WRITE);
if (out==NULL) {
    error("Cannot create file %s\n",text_cod);
    u_fclose(text);
    if (alph!=NULL) {
        free_alphabet(alph);
    }
    return 1;
}
enter=u_fopen(BINARY,enter_pos,U_WRITE);
if (enter==NULL) {
    error("Cannot create file %s\n",enter_pos);
    u_fclose(text);
    u_fclose(out);
    if (alph!=NULL) {
        free_alphabet(alph);
    }
    return 1;
}

/* token list, per-token occurrence counts, and newline positions */
vector_ptr* tokens=new_vector_ptr(4096);
vector_int* n_occur=new_vector_int(4096);
vector_int* n_enter_pos=new_vector_int(4096);
struct hash_table* hashtable=new_hash_table((HASH_FUNCTION)hash_unichar,(EQUAL_FUNCTION)u_equal,
        (FREE_FUNCTION)free,NULL,(KEYCOPY_FUNCTION)keycopy);
if (token_file[0]!='\0') {
    /* preload an existing token inventory so token numbers stay stable */
    load_token_file(token_file,mask_encoding_compatibility_input,tokens,hashtable,n_occur);
}

output=u_fopen_creating_versatile_encoding(encoding_output,bom_output,tokens_txt,U_WRITE);
if (output==NULL) {
    error("Cannot create file %s\n",tokens_txt);
    u_fclose(text);
    u_fclose(out);
    u_fclose(enter);
    if (alph!=NULL) {
        free_alphabet(alph);
    }
    free_hash_table(hashtable);
    free_vector_ptr(tokens,free);
    free_vector_int(n_occur);
    free_vector_int(n_enter_pos);
    return 1;
}
/* placeholder header, overwritten later by write_number_of_tokens() */
u_fprintf(output,"0000000000\n");

int SENTENCES=0;
int TOKENS_TOTAL=0;
int WORDS_TOTAL=0;
int DIGITS_TOTAL=0;
u_printf("Tokenizing text...\n");
if (mode==NORMAL) {
    normal_tokenization(text,out,output,alph,tokens,hashtable,n_occur,n_enter_pos,
            &SENTENCES,&TOKENS_TOTAL,&WORDS_TOTAL,&DIGITS_TOTAL);
} else {
    char_by_char_tokenization(text,out,output,alph,tokens,hashtable,n_occur,n_enter_pos,
            &SENTENCES,&TOKENS_TOTAL,&WORDS_TOTAL,&DIGITS_TOTAL);
}
u_printf("\nDone.\n");
save_new_line_positions(enter,n_enter_pos);
u_fclose(enter);
u_fclose(text);
u_fclose(out);
u_fclose(output);
write_number_of_tokens(tokens_txt,encoding_output,bom_output,tokens->nbelems);

// we compute some statistics
get_snt_path(argv[vars->optind],tokens_txt);
strcat(tokens_txt,"stats.n");
output=u_fopen_creating_versatile_encoding(encoding_output,bom_output,tokens_txt,U_WRITE);
if (output==NULL) {
    error("Cannot write %s\n",tokens_txt);
} else {
    compute_statistics(output,tokens,alph,SENTENCES,TOKENS_TOTAL,WORDS_TOTAL,DIGITS_TOTAL);
    u_fclose(output);
}

// we save the tokens by frequence
get_snt_path(argv[vars->optind],tokens_txt);
strcat(tokens_txt,"tok_by_freq.txt");
output=u_fopen_creating_versatile_encoding(encoding_output,bom_output,tokens_txt,U_WRITE);
if (output==NULL) {
    error("Cannot write %s\n",tokens_txt);
} else {
    sort_and_save_by_frequence(output,tokens,n_occur);
    u_fclose(output);
}

// we save the tokens by alphabetical order
get_snt_path(argv[vars->optind],tokens_txt);
strcat(tokens_txt,"tok_by_alph.txt");
output=u_fopen_creating_versatile_encoding(encoding_output,bom_output,tokens_txt,U_WRITE);
if (output==NULL) {
    error("Cannot write %s\n",tokens_txt);
} else {
    sort_and_save_by_alph_order(output,tokens,n_occur);
    u_fclose(output);
}

free_hash_table(hashtable);
free_vector_ptr(tokens,free);
free_vector_int(n_occur);
free_vector_int(n_enter_pos);
if (alph!=NULL) {
    free_alphabet(alph);
}
free_OptVars(vars);
return 0;
}
/*
 * Construct a Parser: allocate its internal hash table.
 *
 * Fix: the original wrapped the call in an `extern "C" { ... }` block
 * inside the constructor body, which is ill-formed C++ — linkage
 * specifications are only permitted at namespace scope — and left the
 * braces unbalanced.  If new_hash_table() has C linkage, the `extern "C"`
 * belongs on its declaration in the corresponding header.
 */
Parser::Parser() {
    _hash_table = new_hash_table(HASH_TABLE_INIT_SIZE);
}
/*
 * Initialize an egl_state_t for a freshly created context on the given
 * display: clears surface/string/sharing bookkeeping, seeds the GLES
 * implementation limits with conservative defaults (re-queried lazily,
 * see the *_queried flags), resets the tracked GL state to its spec
 * defaults, and allocates the object caches and name handlers.
 */
void
egl_state_init (egl_state_t *state,
                EGLDisplay display,
                EGLContext context)
{
    state->context = context;
    state->display = display;
    state->drawable = EGL_NO_SURFACE;
    state->readable = EGL_NO_SURFACE;

    /* implementation strings are fetched on demand */
    state->vendor_string = NULL;
    state->renderer_string = NULL;
    state->version_string = NULL;
    state->shading_language_version_string = NULL;
    state->extensions_string = NULL;

    state->share_context = NULL;
    state->contexts_sharing = 0;

    state->active = false;

    state->destroy_dpy = false;
    state->destroy_ctx = false;
    state->destroy_draw = false;
    state->destroy_read = false;

    state->vertex_attribs.count = 0;
    state->vertex_attribs.enabled_count = 0;
    state->vertex_attribs.attribs = state->vertex_attribs.embedded_attribs;
    state->vertex_attribs.enabled_attribs = NULL;

    /* conservative GLES limits; the *_queried flags mark which have been
     * refreshed from the real implementation */
    state->max_combined_texture_image_units = 8;
    state->max_vertex_attribs_queried = false;
    state->max_vertex_attribs = 8;
    state->max_cube_map_texture_size = 16;
    state->max_fragment_uniform_vectors = 16;
    state->max_fragment_uniform_vectors_queried = false;
    state->max_renderbuffer_size = 1;
    state->max_renderbuffer_size_queried = false;
    state->max_texture_image_units = 8;
    state->max_texture_image_units_queried = false;
    state->max_texture_size = 64;
    state->max_texture_size_queried = false;
    state->max_varying_vectors = 8;
    state->max_vertex_uniform_vectors = 128;
    state->max_vertex_texture_image_units = 0;
    state->max_texture_max_anisotropy_queried = false;
    state->max_texture_max_anisotropy = 2.0;

    state->error = GL_NO_ERROR;
    state->need_get_error = false;

    /* We add a head to the list so we can get a reference. */
    state->shader_objects = NULL;

    /* default GL state, per the GLES specification defaults */
    state->active_texture = GL_TEXTURE0;
    state->array_buffer_binding = 0;
    state->vertex_array_binding = 0;

    state->blend = GL_FALSE;

    int i;
    for (i = 0; i < 4; i++) {
        state->blend_color[i] = GL_ZERO;
        state->blend_dst[i] = GL_ZERO;
        state->blend_src[i] = GL_ONE;
    }

    state->blend_equation[0] = state->blend_equation[1] = GL_FUNC_ADD;

    memset (state->color_clear_value, 0, sizeof (GLfloat) * 4);
    for (i = 0; i < 4; i++)
        state->color_writemask[i] = GL_TRUE;

    state->cull_face = GL_FALSE;
    state->cull_face_mode = GL_BACK;
    state->current_program = 0;
    state->depth_clear_value = 1;
    state->depth_func = GL_LESS;
    state->depth_range[0] = 0;
    state->depth_range[1] = 1;
    state->depth_test = GL_FALSE;
    state->depth_writemask = GL_TRUE;
    state->dither = GL_TRUE;
    state->element_array_buffer_binding = 0;
    state->framebuffer_binding = 0;
    state->renderbuffer_binding = 0;
    state->front_face = GL_CCW;
    state->generate_mipmap_hint = GL_DONT_CARE;
    state->line_width = 1;
    state->pack_alignment = 4;
    state->unpack_alignment = 4;
    state->unpack_row_length = 0;
    state->unpack_skip_pixels = 0;
    state->unpack_skip_rows = 0;
    state->polygon_offset_factor = 0;
    state->polygon_offset_fill = GL_FALSE;
    state->polygon_offset_units = 0;
    state->sample_alpha_to_coverage = 0;
    state->sample_coverage = GL_FALSE;

    memset (state->scissor_box, 0, sizeof (GLint) * 4);
    state->scissor_test = GL_FALSE;

    /* XXX: should we set this */
    state->shader_compiler = GL_TRUE;

    state->stencil_back_fail = GL_KEEP;
    state->stencil_back_func = GL_ALWAYS;
    state->stencil_back_pass_depth_fail = GL_KEEP;
    state->stencil_back_pass_depth_pass = GL_KEEP;
    state->stencil_back_ref = 0;
    /* NOTE(review): this sets every byte to 1 (0x01010101), not an
     * all-ones mask — confirm this is the intended default value. */
    memset (&state->stencil_back_value_mask, 1, sizeof (GLint));
    state->stencil_clear_value = 0;
    state->stencil_fail = GL_KEEP;
    state->stencil_func = GL_ALWAYS;
    state->stencil_pass_depth_fail = GL_KEEP;
    state->stencil_pass_depth_pass = GL_KEEP;
    state->stencil_ref = 0;
    state->stencil_test = GL_FALSE;
    /* NOTE(review): same byte-filled pattern as above for these masks. */
    memset (&state->stencil_value_mask, 1, sizeof (GLint));
    memset (&state->stencil_writemask, 1, sizeof (GLint));
    memset (&state->stencil_back_writemask, 1, sizeof (GLint));

    memset (state->texture_binding, 0, sizeof (GLint) * 2);

    memset (state->viewport, 0, sizeof (GLint) * 4);

    state->buffer_size[0] = state->buffer_size[1] = 0;
    state->buffer_usage[0] = state->buffer_usage[1] = GL_STATIC_DRAW;

    /* per-context object caches; entries are freed with free() */
    state->texture_cache = new_hash_table(free);
    state->framebuffer_cache = new_hash_table (free);
    state->renderbuffer_cache = new_hash_table (free);

    /* name allocators for each GL object class */
    state->shader_objects_name_handler = name_handler_create ();
    state->texture_name_handler = name_handler_create ();
    state->framebuffer_name_handler = name_handler_create ();
    state->renderbuffer_name_handler = name_handler_create ();
    state->buffer_name_handler = name_handler_create ();

    state->supports_element_index_uint = false;
    state->supports_bgra = false;
}
int main(int argc, char ** argv) { struct Node * head = NULL; char index_file_name[1024]; FILE * index_fd = NULL; char file_or_dir[1024]; char cwd[1024]; DIR *dp; struct dirent *ep; struct hash_table * table = new_hash_table(256); struct stat sb; if (argc != 3) { puts("usage:\n\tindexer <index file> <dir or file>"); return 0; } getcwd(file_or_dir, sizeof(file_or_dir)); strcat(file_or_dir, "/"); strcat(file_or_dir, argv[2]); index_fd = fopen(argv[1], "w+"); /* check if the index file exists to make a hash table from it */ if (index_fd == NULL) { fprintf(stderr, "could not open or create an index file %s\n", argv[1]); return 1; } /* if a folder is in the local dir, use it. otherwise assume it's an absolute dir */ printf("checking for %s\n", file_or_dir); stat(file_or_dir, &sb); if ( S_ISREG(sb.st_mode) ) { index_file(file_or_dir, table); } else if ( S_ISDIR(sb.st_mode) ) { dp = opendir (argv[2]); if ((ep = readdir (dp)) != NULL && ep->d_type == DT_DIR) { strcpy(cwd, argv[2]); } else { strcat(cwd, "/"); strcat(cwd, argv[2]); } head = get_files_in_folder(head, cwd); while (head) { index_file(head->data, table); head = head->next; } } else { fprintf(stderr, "no such directory of file found: %s\n", file_or_dir); return 1; } write_contents(table, index_fd); free_hash_table(table); fclose(index_fd); return 0; }