void Pop() { struct ELEMENT * queueNode = getElementFromLink( (union LINK *) LinkedListPop( &queueList )); struct ELEMENT * stackNode = getElementFromLink( (union LINK *) LinkedListPop( &stackList )); struct ELEMENT * atomicNode = getElementFromLink( (union LINK *) AtomicListPop( &atomicList)); struct ELEMENT * heapNode = getElementFromLink( (union LINK *) HeapPop( &heap )); if( queueNode != NULL ) printf("queue returned %d\n", queueNode->Data ); else printf("queue returned null\n"); if( stackNode != NULL ) printf("stack returned %d\n", stackNode->Data ); else printf("stack returned null\n"); if( atomicNode != NULL ) printf("atomic returned %d\n", atomicNode->Data ); else printf("atomic returned null\n"); if( heapNode != NULL ) printf("heap returned %d\n", heapNode->Data ); else printf("heap returned null\n"); }
/*
 * Prints the current top list: first discards any non-positive entries
 * sitting at the heap root, prints a header sized to what remains, then
 * drains and prints the positive entries in heap order.  The list is
 * consumed (emptied of positive entries) by this call.
 */
void TopListPrint(TopList_T* topList) {
    /* Skip leading entries whose value is zero or negative. */
    for (;;) {
        if (topList->size <= 0 || topList->List[0].value > 0)
            break;
        HeapPop(topList);
    }

    printf(" Get The Top %d List. \n\n", topList->size);
    PrintHeaderInfo(topList->size);

    /* Pop-and-print every remaining positive entry. */
    for (;;) {
        if (topList->size <= 0 || topList->List[0].value <= 0)
            break;
        PrintCombinStat(&topList->List[0]);
        HeapPop(topList);
    }
}
/*
 * Heap smoke test: pushes 1000 Iint values in a scrambled order,
 * verifies the stored back-index after each push, deletes two elements
 * by heap index, then pops the remainder in heap order and prints each
 * value.  Returns 0 on success, 1 on allocation failure.
 *
 * Fixes over the previous version: popped Iint allocations were leaked
 * (malloc'ed, printed, never freed), and malloc was unchecked.
 */
int main() {
    Heap* h = HeapCreate(Iintcmp, IintStoreHeapIndex, 1);
    int i;
    for (i = 0; i < 1000; i++) {
        /* Scramble the insertion order so the heap has real work to do. */
        int j = (i % 100) * 10 + i / 100;
        Iint* x = (Iint*) malloc(sizeof(Iint));
        if (x == NULL) {            /* bail out cleanly on OOM */
            HeapDestroy(h, 1);
            return 1;
        }
        x->cislo = j;
        HeapPush(h, (void*) x);
        /* Check that the stored heap index really locates this element. */
        printf("%i:%i ", x->heapIndex, ((Iint*)h->array[x->heapIndex])->cislo);
    }

    HeapDeleteIndex(h, 999);
    HeapDeleteIndex(h, 1);
    /* NOTE(review): assumes HeapDeleteIndex (or HeapDestroy with flag 1)
     * releases the two deleted elements -- confirm, otherwise they leak. */

    void* x;
    while ((x = HeapPop(h)) != NULL) {
        printf("%i\n", ((Iint*)x)->cislo);
        free(x);  /* popped elements are owned by the caller; was leaked before */
    }

    HeapDestroy(h, 1);
    return 0;
}
// Runs the segmentation search over the given ratings matrix to find the
// best word choice.  Starts from the best path over the initially
// classified matrix entries, then repeatedly classifies the
// highest-priority "pain point" (an unclassified matrix cell suggested by
// the language model), updates the search state, and stops when either an
// acceptable choice is found or too many classifications fail to improve
// the best choice.  Results are written through best_choice, raw_choice,
// best_char_choices and output_best_state.
void Wordrec::SegSearch(CHUNKS_RECORD *chunks_record,
                        WERD_CHOICE *best_choice,
                        BLOB_CHOICE_LIST_VECTOR *best_char_choices,
                        WERD_CHOICE *raw_choice,
                        STATE *output_best_state) {
  int row, col = 0;
  if (segsearch_debug_level > 0) {
    tprintf("Starting SegSearch on ratings matrix:\n");
    chunks_record->ratings->print(getDict().getUnicharset());
  }
  // Start with a fresh best_choice since rating adjustments
  // used by the chopper and the new segmentation search are not compatible.
  best_choice->set_rating(WERD_CHOICE::kBadRating);
  // Clear best choice accumulator (that is used for adaption), so that
  // choices adjusted by chopper do not interfere with the results from the
  // segmentation search.
  getDict().ClearBestChoiceAccum();
  MATRIX *ratings = chunks_record->ratings;
  // Priority queue containing pain points generated by the language model
  // The priority is set by the language model components, adjustments like
  // seam cost and width priority are factored into the priority.
  HEAP *pain_points = MakeHeap(segsearch_max_pain_points);
  // best_path_by_column records the lowest cost path found so far for each
  // column of the chunks_record->ratings matrix over all the rows.
  BestPathByColumn *best_path_by_column =
      new BestPathByColumn[ratings->dimension()];
  for (col = 0; col < ratings->dimension(); ++col) {
    // kBadRating marks the column as having no path found yet.
    best_path_by_column[col].avg_cost = WERD_CHOICE::kBadRating;
    best_path_by_column[col].best_vse = NULL;
  }
  language_model_->InitForWord(prev_word_best_choice_, &denorm_,
                               assume_fixed_pitch_char_segment,
                               best_choice->certainty(),
                               segsearch_max_char_wh_ratio,
                               pain_points, chunks_record);
  MATRIX_COORD *pain_point;
  float pain_point_priority;
  // Bundle groups the output destinations so helpers can update them together.
  BestChoiceBundle best_choice_bundle(
      output_best_state, best_choice, raw_choice, best_char_choices);
  // pending[i] stores a list of the parent/child pair of BLOB_CHOICE_LISTs,
  // where i is the column of the child. Initially all the classified entries
  // in the ratings matrix from column 0 (with parent NULL) are inserted into
  // pending[0]. As the language model state is updated, new child/parent
  // pairs are inserted into the lists. Next, the entries in pending[1] are
  // considered, and so on. It is important that during the update the
  // children are considered in the non-decreasing order of their column,
  // since this guarantess that all the parents would be up to date before
  // an update of a child is done.
  SEG_SEARCH_PENDING_LIST *pending =
      new SEG_SEARCH_PENDING_LIST[ratings->dimension()];

  // Search for the ratings matrix for the initial best path.
  for (row = 0; row < ratings->dimension(); ++row) {
    if (ratings->get(0, row) != NOT_CLASSIFIED) {
      pending[0].add_sorted(
          SEG_SEARCH_PENDING::compare, true,
          new SEG_SEARCH_PENDING(row, NULL, LanguageModel::kAllChangedFlag));
    }
  }
  UpdateSegSearchNodes(0, &pending, &best_path_by_column, chunks_record,
                       pain_points, &best_choice_bundle);

  // Keep trying to find a better path by fixing the "pain points".
  int num_futile_classifications = 0;
  while (!(language_model_->AcceptableChoiceFound() ||
           num_futile_classifications >=
           segsearch_max_futile_classifications)) {
    // Get the next valid "pain point": skip (and delete) entries that are
    // stale -- out of range or already classified since they were queued.
    int pop;
    while (true) {
      pop = HeapPop(pain_points, &pain_point_priority, &pain_point);
      if (pop == EMPTY) break;
      if (pain_point->Valid(*ratings) &&
          ratings->get(pain_point->col, pain_point->row) == NOT_CLASSIFIED) {
        break;
      } else {
        delete pain_point;  // stale entry; this code owns popped coords
      }
    }
    if (pop == EMPTY) {
      if (segsearch_debug_level > 0) tprintf("Pain points queue is empty\n");
      break;
    }
    if (segsearch_debug_level > 0) {
      tprintf("Classifying pain point priority=%.4f, col=%d, row=%d\n",
              pain_point_priority, pain_point->col, pain_point->row);
    }
    // Classify the blob span for this cell and record it in the matrix;
    // the matrix takes ownership of the new BLOB_CHOICE_LIST.
    BLOB_CHOICE_LIST *classified = classify_piece(
        chunks_record->chunks, chunks_record->splits,
        pain_point->col, pain_point->row);
    ratings->put(pain_point->col, pain_point->row, classified);

    if (segsearch_debug_level > 0) {
      print_ratings_list("Updated ratings matrix with a new entry:",
                         ratings->get(pain_point->col, pain_point->row),
                         getDict().getUnicharset());
      chunks_record->ratings->print(getDict().getUnicharset());
    }

    // Insert initial "pain points" to join the newly classified blob
    // with its left and right neighbors.
    if (!classified->empty()) {
      float worst_piece_cert;
      bool fragmented;
      if (pain_point->col > 0) {
        // Left neighbor: the cell ending one column earlier.
        language_model_->GetWorstPieceCertainty(
            pain_point->col-1, pain_point->row, chunks_record->ratings,
            &worst_piece_cert, &fragmented);
        language_model_->GeneratePainPoint(
            pain_point->col-1, pain_point->row, false,
            LanguageModel::kInitialPainPointPriorityAdjustment,
            worst_piece_cert, fragmented, best_choice->certainty(),
            segsearch_max_char_wh_ratio, NULL, NULL,
            chunks_record, pain_points);
      }
      if (pain_point->row+1 < ratings->dimension()) {
        // Right neighbor: the cell starting one row later.
        language_model_->GetWorstPieceCertainty(
            pain_point->col, pain_point->row+1, chunks_record->ratings,
            &worst_piece_cert, &fragmented);
        language_model_->GeneratePainPoint(
            pain_point->col, pain_point->row+1, true,
            LanguageModel::kInitialPainPointPriorityAdjustment,
            worst_piece_cert, fragmented, best_choice->certainty(),
            segsearch_max_char_wh_ratio, NULL, NULL,
            chunks_record, pain_points);
      }
    }

    // Record a pending entry with the pain_point and each of its parents.
    int parent_row = pain_point->col - 1;
    if (parent_row < 0) {  // this node has no parents
      pending[pain_point->col].add_sorted(
          SEG_SEARCH_PENDING::compare, true,
          new SEG_SEARCH_PENDING(pain_point->row, NULL,
                                 LanguageModel::kAllChangedFlag));
    } else {
      for (int parent_col = 0; parent_col < pain_point->col; ++parent_col) {
        if (ratings->get(parent_col, parent_row) != NOT_CLASSIFIED) {
          pending[pain_point->col].add_sorted(
              SEG_SEARCH_PENDING::compare, true,
              new SEG_SEARCH_PENDING(pain_point->row,
                                     ratings->get(parent_col, parent_row),
                                     LanguageModel::kAllChangedFlag));
        }
      }
    }
    UpdateSegSearchNodes(pain_point->col, &pending, &best_path_by_column,
                         chunks_record, pain_points, &best_choice_bundle);
    // A classification that did not improve the best choice counts as futile.
    if (!best_choice_bundle.updated) ++num_futile_classifications;

    if (segsearch_debug_level > 0) {
      tprintf("num_futile_classifications %d\n", num_futile_classifications);
    }

    // Clean up
    best_choice_bundle.updated = false;  // reset the flag for the next round
    delete pain_point;  // done using this pain point
  }

  if (segsearch_debug_level > 0) {
    tprintf("Done with SegSearch (AcceptableChoiceFound: %d\n",
            language_model_->AcceptableChoiceFound());
  }

  // Clean up.
  FreeHeapData(pain_points, MATRIX_COORD::Delete);
  delete[] best_path_by_column;
  delete[] pending;
  // Only the lower triangle (col <= row) of the ratings matrix is used.
  for (row = 0; row < ratings->dimension(); ++row) {
    for (col = 0; col <= row; ++col) {
      BLOB_CHOICE_LIST *rating = ratings->get(col, row);
      if (rating != NOT_CLASSIFIED) language_model_->DeleteState(rating);
    }
  }
}
/*
 * k-way merge of k sorted runs into output_device using a single shared
 * memory buffer.  The buffer is split into k+1 equal slices: one input
 * slice per run plus one output slice.  A min-heap of the current head of
 * each buffered run drives the merge; slices are refilled from their run
 * device as they drain, and the output slice is flushed to output_device
 * whenever it fills (or on the last element).
 *
 * run_devices   : array of k sorted input runs.
 * k             : number of runs to merge.
 * output_device : pre-allocated destination (File or Array medium).
 */
void fileKMerge( Data *run_devices, const int k, Data *output_device )
{
    SPD_ASSERT( output_device->medium == File || output_device->medium == Array, "output_device must be pre-allocated!" );

    /* Memory buffer */
    Data buffer;
    DAL_init( &buffer );
    DAL_allocBuffer( &buffer, DAL_allowedBufSize() );

    const dal_size_t bufferedRunSize = DAL_dataSize(&buffer) / (k + 1); //Size of a single buffered run (+1 because of the output buffer)
    /* TODO: Handle this case */
    SPD_ASSERT( bufferedRunSize > 0 , "fileKMerge function doesn't allow a number of runs greater than memory buffer size" );

    /* Runs Buffer: slices [0, k*bufferedRunSize) of the shared buffer. */
    int* runs = buffer.array.data;
    /* Output buffer: the final slice, after the k input slices. */
    int* output = buffer.array.data+k*bufferedRunSize;

    /* Indexes and Offsets for the k buffered runs:
     *   run_indexes[j]   - position of the next unconsumed element in slice j
     *   run_offsets[j]   - how much of run j has been read from its device
     *   run_buf_sizes[j] - number of valid elements currently in slice j
     * NOTE(review): calloc's arguments are (count, size) -- they are swapped
     * here ( sizeof(dal_size_t), k ); the total byte count is the same, so
     * behavior is unaffected, but consider fixing for clarity. */
    dal_size_t *run_indexes = (dal_size_t*) calloc( sizeof(dal_size_t), k );
    dal_size_t *run_offsets = (dal_size_t*) malloc( k * sizeof(dal_size_t) );
    dal_size_t *run_buf_sizes = (dal_size_t*) malloc( k * sizeof(dal_size_t) );

    /* The auxiliary heap struct */
    Heap heap;
    HeapInit( &heap, k );

    dal_size_t j;
    /* Initializing the buffered runs and the heap: load the first chunk of
     * each run into its slice and push each slice's head element, tagged
     * with its run index. */
    for ( j=0; j < k; j++ ) {
        run_buf_sizes[j] = DAL_dataCopyOS( &run_devices[j], 0, &buffer, j*bufferedRunSize, MIN(bufferedRunSize, DAL_dataSize(&run_devices[j])) );
        run_offsets[j] = run_buf_sizes[j];
        HeapPush( &heap, runs[j*bufferedRunSize], j );
    }

    /* Merging the runs: one output element per iteration. */
    dal_size_t outputSize = 0;
    dal_size_t outputOffset = 0;
    dal_size_t i;
    for ( i=0; i<DAL_dataSize(output_device); i++ ) {
        Min_val min = HeapTop( &heap );
        HeapPop( &heap );

        //the run index
        j = min.run_index;

        /* Elements of run j still on the device, not yet buffered. */
        dal_size_t remainingSize = DAL_dataSize(&run_devices[j])-run_offsets[j];

        if ( ++(run_indexes[j]) < run_buf_sizes[j] ) //If there are others elements in the buffered run
            HeapPush( &heap, runs[j*bufferedRunSize+run_indexes[j]], j ); //pushes a new element in the heap
        else if ( remainingSize > 0 ) { //else, if the run has not been read completely
            /* Refill slice j from the device, then push its new head. */
            run_buf_sizes[j] = DAL_dataCopyOS( &run_devices[j], run_offsets[j], &buffer, j*bufferedRunSize, MIN(remainingSize, bufferedRunSize) );
            run_offsets[j] += run_buf_sizes[j];
            run_indexes[j] = 0;
            HeapPush( &heap, runs[j*bufferedRunSize], j );
        }
        /* Otherwise run j is exhausted and simply drops out of the heap. */

        output[outputSize++] = min.val;

        if ( outputSize == bufferedRunSize || i==DAL_dataSize(output_device)-1 ) { //If the output buffer is full
            /* Flush the output slice to the device (also on the last element). */
            outputOffset += DAL_dataCopyOS( &buffer, k*bufferedRunSize, output_device, outputOffset, outputSize );
            outputSize = 0;
        }
    }

    /* Freeing memory */
    HeapDestroy( &heap );
    DAL_destroy( &buffer );
    free( run_indexes );
    free( run_offsets );
    free( run_buf_sizes );
}