void lexer(char *line, t_lex *lst)
{
    int param[3];

    param[2] = 0;
    while (*line)
    {
        no_space(&line);
        if (!*line)
            break ;
        if ((is_token(line, param)))
        {
            add_token(&lst, param);
            line = line + param[1];
        }
        else if ((is_word(line, param)))
        {
            add_word(&lst, param, line);
            line = line + param[1];
        }
        else
        {
            add_word(&lst, NULL, line);
            line++;
        }
    }
    parser(&lst);
}
t_s2l *ft_strsplitlst(const char *s, char c)
{
    t_s2l   *out;
    int     i;
    int     start;

    i = 0;
    out = NULL;
    start = i = skip_char(i, s, c);
    while (s[i])
    {
        if (s[i] == c)
        {
            add_word(&out, s, start, i - start);
            start = i = skip_char(i, s, c);
        }
        else
            i++;
    }
    if (out == NULL)
        ft_s2lpushback(&out, ft_s2lcreate(s));
    else if (i != start)
        add_word(&out, s, start, i - start);
    return (out);
}
int main()
{
    struct keys *dict = build_keys_dict();

    add_word(dict, "Hello");
    add_word(dict, "World!");
    add_word(dict, "World!");
    add_word(dict, "World!");
    print_dict(dict);
}
char *justify(char **original_words, uint8_t word_count)
{
    char **words = (char **) calloc(word_count, sizeof(char *));
    memcpy(words, original_words, word_count * sizeof(char *));

    char *result = (char *) calloc(MAX_MESSAGE_LENGTH + 1, sizeof(char));

    justify_state state;
    state.cursor = result;
    state.line_index = 0;
    state.line_length = 0;

    for (uint8_t i = 0; i < word_count; ++i) {
        char *word = words[i];
        uint8_t word_length = strlen(word);
        uint8_t words_left = word_count - i;

        uint8_t line_chars_left = LINE_LENGTH - state.line_length;
        // If there are already words on the current line, count the space needed after them.
        if (state.line_length > 0 && line_chars_left > 0) {
            line_chars_left -= 1;
        }

        uint8_t lines_below = LINE_COUNT - (state.line_index + 1);
        uint8_t room_below = lines_below * LINE_LENGTH;

        // If word fits on current line.
        if (word_length <= line_chars_left) {
            add_word(word, word_length, &state);
        }
        // If word does not fit on current line but can be moved to next line.
        else if (room_needed(words + i, words_left) <= room_below && word_length <= LINE_LENGTH) {
            add_newline(&state);
            add_word(word, word_length, &state);
        }
        // Otherwise, word must be broken.
        else {
            uint8_t first_part_length = line_chars_left;
            add_word(word, first_part_length, &state);
            add_newline(&state);
            // Move current string pointer and decrement word index,
            // "tricking" the program into thinking that we've inserted a new word.
            words[i] += first_part_length;
            --i;
        }
    }

    free(words);
    return result;
}
/* An email reference. */
void cm_email (int arg)
{
  if (arg == START)
    {
      char *addr = get_xref_token (1); /* expands all macros in email */
      char *name = get_xref_token (0);

      if (xml && docbook)
        {
          xml_insert_element_with_attribute (EMAIL, START, "url=\"mailto:%s\"", addr);
          if (*name)
            execute_string ("%s", name);
          xml_insert_element (EMAIL, END);
        }
      else if (xml)
        {
          xml_insert_element (EMAIL, START);
          xml_insert_element (EMAILADDRESS, START);
          execute_string ("%s", addr);
          xml_insert_element (EMAILADDRESS, END);
          if (*name)
            {
              xml_insert_element (EMAILNAME, START);
              execute_string ("%s", name);
              xml_insert_element (EMAILNAME, END);
            }
          xml_insert_element (EMAIL, END);
        }
      else if (html)
        {
          add_html_elt ("<a href=");
          /* don't collapse `--' etc. in the address */
          in_fixed_width_font++;
          execute_string ("\"mailto:%s\"", addr);
          in_fixed_width_font--;
          add_word (">");
          execute_string ("%s", *name ? name : addr);
          add_word ("</a>");
        }
      else
        {
          execute_string ("%s%s", name, *name ? " " : "");
          in_fixed_width_font++;
          execute_string ("<%s>", addr);
          in_fixed_width_font--;
        }

      if (addr)
        free (addr);
      if (name)
        free (name);
    }
}
node *add_word(node *cur, char *key)
{
    if (cur == NULL)                        // base case
        return new_node(key, 1);
    if (strcmp(key, cur->key) == 0)         // increment value if word found in tree
        (cur->value)++;
    else if (strcmp(key, cur->key) < 0)
        cur->left = add_word(cur->left, key);
    else
        cur->right = add_word(cur->right, key);
    return cur;
}
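/*
 * A minimal sketch, under assumptions, of the node type and new_node()
 * allocator that the BST-based add_word() above relies on.  The real field
 * names and error handling are not shown in this section, so everything
 * below is illustrative only.
 */
#include <stdlib.h>
#include <string.h>

typedef struct node {
    char *key;              /* the word itself */
    int value;              /* number of times the word has been added */
    struct node *left;
    struct node *right;
} node;

static node *new_node(char *key, int value)
{
    node *n = malloc(sizeof(node));

    if (n == NULL)
        return NULL;
    n->key = strdup(key);   /* copy the key so the caller's buffer can be reused */
    n->value = value;
    n->left = NULL;
    n->right = NULL;
    return n;
}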
/*
 * This function adds words in different orders and amounts to the AVL tree.
 * After adding, it validates the AVL tree and prints the results to stdout.
 */
void run_test()
{
    node* root;
    FILE* out;
    int i;
    char* test_strings[] = {"a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k"};

    root = NULL;
    if ((out = fopen("tree_test_result.txt", "w")) == NULL) {
        fprintf(stderr, "Cannot write to output file tree_test_result\n");
        exit(1);
    }

    /* Add words in increasing order */
    printf("\nAdd words in increasing order:\n");
    for (i = 0; i < 11; i++) {
        root = add_word(root, test_strings[i]);
    }
    printf("Run validation\n");
    validate_tree(root);
    printf("Test passed\n");
    print_words(root, out);
    destroy(root);

    /* Add words in decreasing order */
    root = NULL;
    printf("\nAdd words in decreasing order:\n");
    for (i = 10; i >= 0; i--) {
        root = add_word(root, test_strings[i]);
    }
    printf("Run validation\n");
    validate_tree(root);
    printf("Test passed\n");
    print_words(root, out);
    destroy(root);

    /* Add only one word */
    root = NULL;
    printf("\nAdd single word:\n");
    root = add_word(root, test_strings[0]);
    printf("Run validation\n");
    validate_tree(root);
    printf("Test passed\n");
    print_words(root, out);
    destroy(root);

    fclose(out);
    exit(0);
}
void test()
{
    struct trie t;

    trie_clear(&t);
    add_word(&t, "a");
    add_word(&t, "a");
    add_word(&t, "b1");
    add_word(&t, "b2");
    add_word(&t, "b13");

    printf("%s\n", (char *) get_value(&t, "b2"));
    printf("%s\n", (char *) get_value(&t, "b1"));
    printf("%s\n", (char *) get_value(&t, "b13"));
    printf("%s\n", (char *) get_value(&t, "b"));
    printf("%s\n", (char *) get_value(&t, ""));
}
void tick_test2(void)
{
    int i;

#define START 40
#define DELAY 50

    if (looper2 == START)
        add_word(msg1);
    if (looper2 == START + DELAY)
        add_word(msg2);
    if (looper2 == START + (DELAY * 2))
        add_word(msg3);
    if (looper2 == START + (DELAY * 3))
        add_word(msg4);
    if (looper2 == START + (DELAY * 4))
        add_word(msg5);
    looper2++;

    for (i = 0; i < 128; i++) {
        back[i] = test2_tab[(i + 10) + (128 * theta)];
    }

    if (sprite_ram_mutex == 0) {
        clear_sprites();
        tick_words();
        sprite_ram_mutex = 1;
    }

    flip_flop ^= 1;
    if (flip_flop == 1) {
        front = dist_map;
        back = dist_map2;
    } else {
        front = dist_map2;
        back = dist_map;
    }

    /*
    theta++;
    if (theta == 0)
        theta = 1;
    if (theta == 0xff)
        theta = 1;
    */
}
int parse_sentence(Cellule* tab, long size_of_tab, unsigned char* sentence,
                   unsigned short sentence_len, unsigned long sentence_pos,
                   Liste* alphabetical_word_list)
{
    /* Here we have a problem, since we also have to keep track of the word
       index, so this function will be rewritten. */
    unsigned char* mot = calloc(sentence_len, sizeof(unsigned char));
    unsigned short i = 0;
    unsigned short mot_index = 0;

    while (sentence[i] != '\0') {
        if (sentence[i] == ' ' || sentence[i] == ',' || sentence[i] == ';' || sentence[i] == '\n') {
            /* Then we have to add the current word to the list and clear the current word. */
            if (strcmp((const char*) mot, "") != 0
                && strcmp((const char*) mot, " ") != 0
                && strcmp((const char*) mot, ".") != 0) {
                add_word(tab, size_of_tab, mot, i + 1, sentence_pos, alphabetical_word_list);
                mot_index = 0;
                for (; mot_index < sentence_len; mot_index++)
                    mot[mot_index] = 0;
                mot_index = 0;
            }
            else
                i++;
        }
        else {
            mot[mot_index++] = sentence[i++];
        }
    }
    free(mot);
    return 1;
}
int main(int argc, char **argv)
{
    char buf[WORD_LEN_MAX];
    FILE *file;

    if (argc == 1) {
        file = stdin;
    } else {
        file = fopen(argv[1], "r");
        if (file == NULL) {
            fprintf(stderr, "%s: cannot open %s\n", argv[0], argv[1]);
            exit(1);
        }
    }

    /* initialize the word management module */
    word_initialize();

    while (getWord(buf, WORD_LEN_MAX, file) != EOF) {
        add_word(buf);
    }

    dump_word(stdout);
    word_finalize();
    return 0;
}
/* shortcontents in HTML; Should this produce a standalone file? */
static void shortcontents_update_html (char *contents_filename)
{
  int i;
  char *toc_file = NULL;

  /* is there any TOC at all? */
  if (!toc_counter)
    return;

  add_html_block_elt_args ("\n<div class=\"shortcontents\">\n<h2>%s</h2>\n<ul>\n",
                           gdt("Short Contents"));

  if (contents_filename)
    toc_file = filename_part (contents_filename);

  for (i = 0; i < toc_counter; i++)
    {
      char *name = toc_entry_alist[i]->name;

      if (toc_entry_alist[i]->level == 0)
        {
          if (contents_filename)
            add_word_args ("<li><a href=\"%s#toc_%s\">%s</a></li>\n",
                           splitting ? toc_file : "", name, name);
          else
            add_word_args ("<a href=\"%s#%s\">%s</a>\n",
                           splitting ? toc_entry_alist[i]->html_file : "", name, name);
        }
    }

  add_word ("</ul>\n</div>\n\n");

  if (contents_filename)
    free (toc_file);
}
int load_tables(FILE *wdlst)
{
    char *ap;
    char ts[MAX_LN_LEN];

    /* first read the first line of the file to get the hash table size */
    if (!fgets(ts, MAX_LN_LEN - 1, wdlst))
        return 2;
    mychomp(ts);
    tablesize = atoi(ts);
    tablesize = tablesize + 5;
    if ((tablesize % 2) == 0)
        tablesize++;

    /* allocate the hash table */
    tableptr = (struct hentry *) calloc(tablesize, sizeof(struct hentry));
    if (!tableptr)
        return 3;

    /* loop through all words in the word list, add them to the hash table,
     * and store away the word and affix strings in tmpfile */
    while (fgets(ts, MAX_LN_LEN - 1, wdlst)) {
        mychomp(ts);
        ap = mystrdup(ts);
        add_word(ap);
    }
    return 0;
}
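/*
 * A minimal sketch, not the original source, of the kind of add_word() that
 * load_tables() above appears to expect: insert the duplicated line into the
 * tableptr hash table allocated above, chaining collisions off the slot.
 * The hentry field names used here (word, next), the hash function, and the
 * collision policy are assumptions; the real implementation may differ.
 */
#include <stdlib.h>

static unsigned int hash_word(const char *s)
{
    unsigned long h = 5381;

    while (*s)
        h = h * 33 + (unsigned char) *s++;
    return (unsigned int) (h % tablesize);
}

static int add_word(char *word)
{
    struct hentry *slot = &tableptr[hash_word(word)];

    if (slot->word == NULL) {
        /* first entry for this bucket lives in the table slot itself */
        slot->word = word;
        slot->next = NULL;
        return 0;
    }
    /* otherwise chain a new entry off the end of the bucket */
    while (slot->next != NULL)
        slot = slot->next;
    slot->next = (struct hentry *) calloc(1, sizeof(struct hentry));
    if (slot->next == NULL)
        return 1;
    slot->next->word = word;
    return 0;
}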
int main(int argc, char **argv)
{
    char buf[WORD_LEN_MAX];
    FILE *fp;

    if (argc == 1) {
        fp = stdin;
    } else {
        fp = fopen(argv[1], "r");
        if (fp == NULL) {
            fprintf(stderr, "%s: %s cannot be opened.\n", argv[0], argv[1]);
            exit(1);
        }
    }

    /* initialize the word manager */
    word_initialize();

    /* read the file, adding each word as it is seen */
    while (get_word(buf, WORD_LEN_MAX, fp) != EOF) {
        add_word(buf);
    }

    /* print the number of occurrences of each word */
    dump_word(stdout);

    /* finalize the word manager */
    word_finalize();
    return 0;
}
int addStringToHash(char *filename, char *string)
{
    if (string == NULL || filename == NULL)
        return 1;

    TokenizerAlphaNumT *tokenizer = TKANCreate(string);
    if (tokenizer == NULL) {
        printf("Error: unable to create tokenizer\n");
        return 1;
    }

    char *token = NULL;
    while ((token = TKANGetNextToken(tokenizer)) != NULL) {
        printf("BEFORE LOWER CASE %s\n", token);
        int i;
        for (i = 0; token[i]; i++)
            token[i] = tolower(token[i]);
        printf("AFTER LOWER CASE %s\n", token);

        /* Add to hash here */
        add_word(filename, token, tokenizer->copied_string);
        /* printf("%s", token); */
        free(token);
    }

    if (tokenizer != NULL)
        TKANDestroy(tokenizer);
    return 0;
}
int main(int argc, char **argv)
{
    char buf[WORD_LEN_MAX];
    FILE *fp;

    if (argc == 1) {
        fp = stdin;
    } else {
        fp = fopen(argv[1], "r");
        if (fp == NULL) {
            fprintf(stderr, "%s: %s cannot be opened.\n", argv[0], argv[1]);
            exit(1);
        }
    }

    word_initialize();

    while (get_word(buf, WORD_LEN_MAX, fp) != EOF) {
        add_word(buf);
    }

    dump_word(stdout);
    word_finalize();
    return 0;
}
int main(int argc, char *argv[])
{
    pnode_t phead = NULL;
    int choice = 0;

    phead = create_link();
#ifndef __NDEBUG__
    print_link(phead);
#endif

    while (1) {
#ifdef __NDEBUG__
        system("clear");
#endif
        choice = menu();
        if (choice == SEARCH_WORD)
            search_for_word(phead);
        else if (choice == ADD_WORD)
            phead = add_word(phead);
        else if (choice == EXIT_DICT)
            break;
    }

    free_link(phead);
    return 0;
}
Node *index_file(Node *head, char *fname, char **filenames)
{
    char line[MAXLINE];
    char *marker, *token;
    int countlines = 0;
    FILE *fp;

    if ((fp = fopen(fname, "r")) == NULL) {
        perror(fname);
        exit(1);
    }

    while ((fgets(line, MAXLINE, fp)) != NULL) {
        countlines++;
        if ((countlines % 1000) == 0) {
            printf("processed %d lines from %s (words %d)\n", countlines, fname, num_words);
        }
        line[strlen(line) - 1] = '\0';
        if (strlen(line) == 0) {
            continue;
        }
        marker = line;
        while ((token = strsep(&marker, " \t")) != NULL) {
            if (strlen(token) == 0) {
                continue;
            }
            token = remove_punc(token);
            if ((strlen(token) <= 3) || isdigit(*token)) {
                continue;
            }
            if (*token != '\0') {
                head = add_word(head, filenames, token, fname);
            }
        }
    }
    fclose(fp);    /* close the input file before returning */
    return head;
}
void fill_tab(char **tab, char *str)
{
    int i;
    int j;
    int word;

    i = 0;
    word = 0;
    while (str[i] != '\0')
    {
        if (flag_delim(str[i]) == 0)
        {
            j = 0;
            while (flag_delim(str[i + j]) == 0)
                j++;
            tab[word] = (char*)malloc(sizeof(char) * (j + 1));
            add_word((tab[word]), (str + i));
            word++;
            i = i + j;
        }
        else
            i++;
    }
    tab[word] = NULL;
}
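/*
 * A minimal sketch, under assumptions, of the add_word() helper that
 * fill_tab() above relies on: it is assumed to copy characters from src into
 * dst up to the next delimiter (as reported by the same flag_delim() used in
 * fill_tab()) and to NUL-terminate dst, which fill_tab() has already sized
 * to hold the word plus the terminator.
 */
static void add_word(char *dst, char *src)
{
    int i;

    i = 0;
    while (src[i] != '\0' && flag_delim(src[i]) == 0)
    {
        dst[i] = src[i];
        i++;
    }
    dst[i] = '\0';
}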
BasicTrie::BasicTrie(const std::string& word, unsigned int frequency)
{
    frequency_ = 0;
    if (!word.empty())
        add_word(word, frequency);
    else
        frequency_ = frequency;
}
void LapeedTrends::increaseCount(std::string s, unsigned int amount)
{
    const auto iterator = word_count.find(s);
    if (iterator == word_count.end()) {
        add_word(s, amount);
    } else {
        increment_word(s, amount, iterator);
    }
}
const std::shared_ptr<Msg> WordRecord::build_same(Serializer *ser) const
{
    auto c2c = std::make_shared<WordRecord>();

    if (ser->get_len()) {
        int word_number = ser->get_int32();
        for (int i = 0; i < word_number; i++)
            c2c->add_word(ser->get_string());
    }
    return c2c;
}
int main(int argc, char **argv)
{
    Node *head = NULL;
    char **filenames = init_filenames();
    int ch;                               /* getopt() returns an int, not a char */
    char *indexfile = "index";
    char *namefile = "filenames";

    while ((ch = getopt(argc, argv, "i:n:")) != -1) {
        switch (ch) {
        case 'i':
            indexfile = optarg;
            break;
        case 'n':
            namefile = optarg;
            break;
        default:
            fprintf(stderr, "Usage: indexfile [-i FILE] [-n FILE] FILE...\n");
            exit(1);
        }
    }

    while (optind < argc) {
        FILE *fname;

        if ((fname = fopen(argv[optind], "r")) == NULL) {
            perror("Name file");
            exit(1);
        }

        char line[MAXLINE];
        char splitBy[] = " \t\n";
        char *token;
        char *cleaned_token;

        while ((fgets(line, MAXLINE, fname)) != NULL) {
            token = strtok(line, splitBy);
            while (token != NULL) {
                cleaned_token = clean_word(token);
                // only add_word if not an empty string
                if (strcmp(cleaned_token, "") != 0) {
                    head = add_word(head, filenames, cleaned_token, argv[optind]);
                }
                token = strtok(NULL, splitBy);
            }
        }
        fclose(fname);
        optind++;
    }

    write_list(namefile, indexfile, head, filenames);
    display_list(head, filenames);
    return 0;
}
bool khrn_fmem_add_special(KHRN_FMEM_T *fmem, uint8_t **p, uint32_t special_i, uint32_t offset)
{
    KHRN_FMEM_TWEAK_T *tw;

    tw = tweak_next(fmem, &fmem->special);
    if (!tw)
        return false;

    tw->special.location = *p;
    tw->special.special_i = special_i;
    add_word(p, offset);
    return true;
}
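/*
 * Both khrn_fmem_add_special() above and khrn_fmem_add_fix_special_0()
 * further below finish by calling add_word(p, offset).  A minimal sketch of
 * what such a helper is assumed to do: store a 32-bit little-endian value at
 * the current write position and advance the pointer past it.  The real
 * driver helper is not shown in this section and may go through its own
 * accessor, so this is illustrative only.
 */
#include <stdint.h>

static void add_word(uint8_t **p, uint32_t w)
{
    (*p)[0] = (uint8_t)(w & 0xff);
    (*p)[1] = (uint8_t)((w >> 8) & 0xff);
    (*p)[2] = (uint8_t)((w >> 16) & 0xff);
    (*p)[3] = (uint8_t)((w >> 24) & 0xff);
    *p += 4;                /* advance the write pointer past the stored word */
}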
int main(int argc, char **argv)
{
    // First and only argument is the filename of the word list to use for our
    // dictionary
    if (argc != 2) {
        fprintf(stderr, "Usage: %s wordlist\n", argv[0]);
        return 1;
    }

    FILE *f = fopen(argv[1], "r");
    if (f == NULL) {
        fprintf(stderr, "%s: %s\n", argv[1], strerror(errno));
        return 1;
    }

    Node *root = make_node();

    // Read in each word in the word list and add it to our trie
    char word[255];
    while (fgets(word, DIM(word), f)) {
        add_word(root, word);
    }
    fclose(f);
    printf("[finished reading word list]\n");

    // Search for words starting at each start location
    for (int i = 0; i < DIM(startLocs); i++) {
        word[0] = 0;
        bool visited[N][N] = {{0}};
        int r = startLocs[i][0];
        int c = startLocs[i][1];
        int ch = grid[r][c];
        search(r, c, root->children[ch - 'a'], word, 0, visited);
        printf("\n");
    }

    // We could free the trie here, but doing so node-by-node is very slow. We
    // could instead use a pool allocator for the nodes and free the entire pool
    // at once, but there's not much point in doing that for such a simple
    // program as this. Instead, we just leak the memory and let the OS clean
    // up after us, which is very fast.
    //free_node(root);

    return 0;
}
static void read_word(ParseInfo pi)
{
    char c;
    bool reading = true;

    ojc_reader_backup(&pi->rd);
    ojc_reader_protect(&pi->rd);
    while (reading) {
        c = ojc_reader_get(&pi->err, &pi->rd);
        switch (c) {
        case ',':
        case ']':
        case '}':
        case ' ':
        case '\t':
        case '\f':
        case '\n':
        case '\r':
            ojc_reader_backup(&pi->rd);
            reading = false;
            break;
        case '\0':
            reading = false;
            break;
        default:
            break;
        }
    }
    if (16 <= pi->rd.tail - pi->rd.start) { // TBD sizeof _Str.ca
        if ('\0' == c) {
            ojc_set_error_at(pi, OJC_INCOMPLETE_ERR, __FILE__, __LINE__, "invalid token");
        } else {
            ojc_set_error_at(pi, OJC_PARSE_ERR, __FILE__, __LINE__, "invalid token");
        }
    } else if (0 == strncmp("true", pi->rd.start, 4)) {
        add_value(pi, get_val(pi, OJC_TRUE));
    } else if (0 == strncmp("false", pi->rd.start, 5)) {
        add_value(pi, get_val(pi, OJC_FALSE));
    } else if (0 == strncmp("null", pi->rd.start, 4)) {
        add_value(pi, get_val(pi, OJC_NULL));
    } else if (ojc_word_ok) {
        add_word(pi, pi->rd.start, pi->rd.tail - pi->rd.start);
    } else {
        if ('\0' == c) {
            ojc_set_error_at(pi, OJC_INCOMPLETE_ERR, __FILE__, __LINE__, "invalid token");
        } else {
            ojc_set_error_at(pi, OJC_PARSE_ERR, __FILE__, __LINE__, "invalid token");
        }
    }
    ojc_reader_release(&pi->rd);
}
void command_add(FILE *f)
{
    char word[130];
    read_word(word, 130);
    uint64 index = find_word(f, word);
    if (0 == index) {
        // The word is not there yet, so it can be added.
        char *content = read_content();
        add_word(f, word, content);
        free(content);
    } else {
        // Remember the entry that was found.
        struct entry_t entry;
        read_entry(f, &entry, index, READ_ENTRY_WORD | READ_ENTRY_DO_SEEK);
        remove_words(f, entry.word);
        char *content = read_content();
        add_word(f, entry.word, content);
        free(content);
        free_entry(&entry);
        //TODO( "Read the definition, remember the word, insert the new entry, delete the old one." );
    }
}
bool khrn_fmem_add_fix_special_0(KHRN_FMEM_T *fmem, uint8_t **p, MEM_HANDLE_T handle, uint32_t offset)
{
    uint8_t *p2 = *p;

    if (!khrn_fmem_add_special(fmem, &p2, KHRN_FMEM_SPECIAL_0, 0))
        return false;

    if (handle == MEM_INVALID_HANDLE) {
        add_word(p, offset);
        return true;
    } else {
        return khrn_fmem_add_fix(fmem, p, handle, offset);
    }
}
void read_nonterminals(FILE *file)
{
    char buffer[1000];
    int n;

    /* bounded width so a long token cannot overflow the buffer */
    while (fscanf(file, "%999s", buffer) != EOF) {
        n = add_word(buffer, &nt_lex);
        assert(n >= 0);
    }

    TRACENT = find_word("NP-A-g", &nt_lex);
    assert(TRACENT > 0);
    TRACETAG = find_word("NN", &nt_lex);
    assert(TRACETAG > 0);
}
int add_built_in_words(void)
{
    if (!spell_builtin_added) {
        int i = 0;

        while (spell_builtin[i] != NULL) {
            if (add_word(spell_builtin[i]))
                return -1;
            i++;
        }
        spell_builtin_added = 1;
    }
    return 0;
}