/*
 * addStringToHash - split `string` into alphanumeric tokens, lower-case each
 * token, and record it in the hash table under `filename`.
 *
 * Returns 0 on success, 1 if either argument is NULL or the tokenizer could
 * not be created.
 *
 * Ownership: each token returned by TKANGetNextToken() is owned by this
 * function and freed after add_word() — assumes add_word copies what it
 * needs (TODO confirm against add_word's contract).
 */
int addStringToHash(char *filename, char *string)
{
    if (string == NULL || filename == NULL)
        return 1;

    TokenizerAlphaNumT *tokenizer = TKANCreate(string);
    if (tokenizer == NULL) {
        printf("Error: unable to create tokenizer\n");
        return 1;
    }

    char *token = NULL;
    while ((token = TKANGetNextToken(tokenizer)) != NULL) {
        /* Normalize in place. Cast to unsigned char first: passing a
         * negative plain char to tolower() is undefined behavior. */
        for (int i = 0; token[i]; i++)
            token[i] = (char)tolower((unsigned char)token[i]);

        add_word(filename, token, tokenizer->copied_string);
        free(token);
    }

    /* tokenizer is provably non-NULL here; no guard needed. */
    TKANDestroy(tokenizer);
    return 0;
}
int addToHash(char *filename, char* word){ if(filename == NULL || word == NULL){ return -1; // errors or empty pointers. } TokenizerANT tokenizer = TKANCreate(word); if(tokenizer == NULL){ printf("Error creating tokenizer\n"); return -1; } char* token = NULL; while((token = TKANGetNextToken(tokenizer)) != NULL){ //got to convert to lowercase for(int i = 0; token[i]; i++){ token[i] = tolower(token[i]); } //now add to hash addWord(filename, token, tokenizer->copied_string); // and free created token! free(token); } //check if tokeniser object is freed or not if(tokenizer != NULL){ TKANDestroy(tokenizer); } }