static int replace_end (char *word, const rule_list *rule)
{
    register char *ending;   /* set to start of possible stemmed suffix */
    char tmp_ch;             /* save replaced character when testing */

    while (0 != rule->id) {
        ending = end - rule->old_offset;
        if (word <= ending) {
            if (0 == strcmp (ending, rule->old_end)) {
                tmp_ch = *ending;
                *ending = EOS;
                if ((rule->min_root_size < word_size (word))
                    && (!rule->condition || (*rule->condition) (word))) {
                    strcat (word, rule->new_end);
                    end = ending + rule->new_offset;
                    break;
                }
                *ending = tmp_ch;
            }
        }
        rule++;
    }

    return (rule->id);
}
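replace_end() above walks a table of suffix rules terminated by an entry whose id is 0. Below is a minimal sketch of what the rule_list record might look like, inferred only from the fields the function dereferences; the real definition, field order, and offset conventions are not shown in this listing and are assumptions for illustration.

/* Hypothetical layout of rule_list, reconstructed from the accesses in
 * replace_end(); not the project's actual declaration. */
typedef struct rule_list {
    int id;                          /* non-zero rule identifier; 0 ends the table       */
    char *old_end;                   /* suffix expected at the end of the word           */
    char *new_end;                   /* replacement suffix appended with strcat()        */
    int old_offset;                  /* distance back from the global `end` pointer
                                        to where the old suffix starts                   */
    int new_offset;                  /* distance forward to the new end of the word      */
    int min_root_size;               /* word_size() of the stripped root must exceed it  */
    int (*condition)(const char *);  /* optional extra test, e.g. add_an_e()/remove_an_e()
                                        shown further down                               */
} rule_list;

/* The scan stops at a sentinel entry whose id is 0, e.g.:
 *     { 0, NULL, NULL, 0, 0, 0, NULL }
 */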
void SignatureStream::next() {
  GUARANTEE(_num_param_words_processed <= _num_param_words, "sanity");
  _word_index += word_size();
  if (_num_param_words_processed < _num_param_words) {
    juint chr = (juint)_signature->byte_at(_position);
    if (chr < 0x80) {
      // Single-byte encoding: a primitive field type.
      _type = TypeSymbol::primitive_field_basic_type_for(chr);
      _position++;
    } else {
      // Otherwise a two-byte class id follows. Unless in fast mode,
      // resolve it to tell array classes apart from instance classes.
      _type = T_OBJECT;
      _current_class_id = _signature->decode_ushort_at(_position);
      if (!_fast) {
        JavaClass::Raw klass = Universe::class_from_id(_current_class_id);
        if (!klass.is_instance_class()) {
          _type = T_ARRAY;
        }
      }
      _position += 2;
    }
    // Advance by the number of parameter words this type occupies.
    _num_param_words_processed += word_size_for(_type);
  } else {
    _is_return_type = true;
    _type = _signature->return_type(_fast);
    _num_param_words_processed += 1; // because return type may be T_VOID!
  }
}
void Metachunk::mangle(juint word_value) {
  // Overwrite the payload of the chunk and not the links that
  // maintain list of chunks.
  HeapWord* start = (HeapWord*)initial_top();
  size_t size = word_size() - overhead();
  Copy::fill_to_words(start, size, word_value);
}
void Metachunk::print_on(outputStream* st) const {
  st->print_cr("Metachunk:"
               " bottom " PTR_FORMAT " top " PTR_FORMAT
               " end " PTR_FORMAT " size " SIZE_FORMAT,
               bottom(), top(), end(), word_size());
  if (Verbose) {
    st->print_cr(" used " SIZE_FORMAT " free " SIZE_FORMAT,
                 used_word_size(), free_word_size());
  }
}
/* Rule condition: true when word_size(word) is 1 and the word does not
 * end in a consonant-vowel-consonant pattern. */
static int remove_an_e (const char *word)
{
    return ((1 == word_size (word)) && !ends_with_cvc (word));
}

/* Rule condition: true when word_size(word) is 1 and the word does end
 * in a consonant-vowel-consonant pattern. */
static int add_an_e (const char *word)
{
    return ((1 == word_size (word)) && ends_with_cvc (word));
}
void move_to(HeapWord* destination) {
  Memory::copy_words_aligned_overlapping(start(), destination, word_size());
}
// int fd_to_read_from1, int fd_to_read_from2...
int main(int argc, char **argv)
{
    int *read_splitter;
    int num_splitter = 0;
    int builder_id = -1;
    int senders;
    int tableSize = 1;
    WordNode **hashTable;
    WordNode *papa;
    int i, check;
    int more = 0;
    long total_words = 0, uniqueWords = 0;
    struct tms tb1;
    struct pollfd *fds;
    int word_came = 0;

    start = (double) times(&tb1);

    if (builder_cmd_arg_parser(argc, argv, &builder_id, &num_splitter,
                               &tableSize, &read_splitter) == -1) {
        return -1;
    }

    printf("\nBuilder %d (pid: %d) : Parameters are: \nTable Size: %d \nSplitter File Descriptors (Read): ",
           builder_id + 1, getpid(), tableSize);
    fflush(stdout);
    for (i = 0; i < num_splitter; i++)
        printf("%d ", read_splitter[i]);
    printf("\n");
    fflush(stdout);

    // Create the hash table: one word list per bucket.
    if ((hashTable = malloc(tableSize * sizeof(WordNode *))) == NULL) {
        perror("malloc: builder hash table ");
        return -1;
    }
    for (i = 0; i < tableSize; i++)
        if ((hashTable[i] = word_create_list()) == NULL)
            return -1;
    // if( (papa = word_create_list() ) == NULL ) { return -1; }

    // One pollfd per splitter pipe.
    if ((fds = malloc(num_splitter * sizeof(struct pollfd))) == NULL) {
        perror("malloc: builder fds ");
        return -1;
    }
    for (i = 0; i < num_splitter; i++) {
        fds[i].fd = read_splitter[i];
        fds[i].events = POLLRDNORM;
    }
    free(read_splitter);
    //return -1;

    double *splTime;
    char word[SIZE];
    senders = num_splitter;

    if ((splTime = malloc(num_splitter * sizeof(double))) == NULL) {
        perror("error: malloc for double *splTime ");
        return -1;
    }

    while (senders != 0) {
        int wsize = 0;
        int wc;
        int put = 0;

        check = poll(fds, num_splitter, 0);
        if (check == 0)
            continue;
        else if (check == -1) {
            perror("poll: builder ");
            return -1;
        }

        for (i = 0; i < num_splitter; i++) {
            for (wc = 0; wc < SIZE; wc++)
                word[wc] = '\0';

            // Data available on this splitter's pipe
            if ((fds[i].revents & POLLRDNORM) != 0) {
                // Read the word length prefix
                if (read_all(fds[i].fd, &wsize, sizeof(int)) == -1) {
                    return -1;
                }
                if (wsize == -1) {
                    // Read the splitter's elapsed time and retire its fd
                    if (read_all(fds[i].fd, splTime + i, sizeof(double)) == -1) {
                        return -1;
                    }
                    close(fds[i].fd);
                    fds[i].fd = -1;
                    senders--;
                    //continue;
                } else {
                    if (read_all(fds[i].fd, word, wsize) != -1) {
                        total_words++;
                        put = hash_function(word, tableSize);
                        //printf("builder: %s num: %lu \n", word, total_words); fflush(stdout);
                        word_insert(hashTable[put], word);
                    } else
                        return -1;
                }
            }
        }
    }
    free(fds);

    total_words = 0;
    for (i = 0; i < tableSize; i++) {
        // printf("Words in list %d : %d\n", i+1, word_count_all(hashTable[i]) );
        total_words += word_count_all(hashTable[i]);
    }
    for (i = 0; i < tableSize; i++) {
        uniqueWords += word_size(hashTable[i]);
    }

    // printf("Sending to root\n\n");
    if (send_to_root(builder_id, hashTable, tableSize, splTime, num_splitter) == -1) {
        perror("error: send to root ");
        return -1;
    }
    free(splTime);

    for (i = 0; i < tableSize; i++) {
        word_delete_list(hashTable[i]);
    }
    free(hashTable);

    // printf("builder: %d: %lf sec\n", builder_id, totalTime);

    // Send SIGUSR2 and SIGRTMIN+2 to the parent
    if (kill(getppid(), SIGUSR2) == -1) {
        perror("builder - kill");
        return -1;
    }
    if (kill(getppid(), SIGRTMIN + 2) == -1) {
        perror("builder - kill");
        return -1;
    }

    printf("\n\nBuilder %d (pid: %d) : Total Words read: %ld \nBuilder %d (pid: %d) : Unique words: %ld \nBuilder %d (pid: %d) : Terminating...\n",
           builder_id + 1, getpid(), total_words,
           builder_id + 1, getpid(), uniqueWords,
           builder_id + 1, getpid());
    return 0;
}
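The read loop above implies a simple length-prefixed protocol on each splitter pipe: an int word length followed by the word bytes, with a length of -1 signalling end-of-stream followed by a double carrying the splitter's elapsed time. A minimal sketch of what the sending side might look like follows; write_all(), send_word(), send_done(), and the convention that the length includes the terminating NUL are all assumptions for illustration, not code from this project.

#include <string.h>
#include <unistd.h>

/* Assumed counterpart to read_all(): loop until all count bytes are written. */
static int write_all(int fd, const void *buf, size_t count)
{
    const char *p = buf;
    while (count > 0) {
        ssize_t n = write(fd, p, count);
        if (n <= 0)
            return -1;
        p += n;
        count -= (size_t) n;
    }
    return 0;
}

/* Hypothetical splitter-side send: length prefix, then the word bytes. */
static int send_word(int fd, const char *w)
{
    int len = (int) strlen(w) + 1;   /* assumption: length includes the NUL */
    if (write_all(fd, &len, sizeof(int)) == -1)
        return -1;
    return write_all(fd, w, (size_t) len);
}

/* Hypothetical end-of-stream marker: -1 length, then the elapsed time. */
static int send_done(int fd, double elapsed)
{
    int sentinel = -1;
    if (write_all(fd, &sentinel, sizeof(int)) == -1)
        return -1;
    return write_all(fd, &elapsed, sizeof(double));
}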