Example #1
/**********************************************************************
 * save_best_state
 *
 * Save this state away to be compared later.
 **********************************************************************/
void save_best_state(CHUNKS_RECORD *chunks_record) {
  STATE state;
  SEARCH_STATE chunk_groups;
  int num_joints;

  if (save_priorities) {
    num_joints = chunks_record->ratings->dimension() - 1;

    state.part1 = 0xffffffff;
    state.part2 = 0xffffffff;

    chunk_groups = bin_to_chunks (&state, num_joints);
    display_segmentation (chunks_record->chunks, chunk_groups);
    memfree(chunk_groups);

    cprintf ("Enter the correct segmentation > ");
    fflush(stdout);
    state.part1 = 0;
    scanf ("%x", &state.part2);

    chunk_groups = bin_to_chunks (&state, num_joints);
    display_segmentation (chunks_record->chunks, chunk_groups);
    memfree(chunk_groups);
    window_wait(segm_window);

    if (known_best_state)
      free_state(known_best_state);
    known_best_state = new_state (&state);
  }
}
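
The all-ones STATE above (part1/part2, 32 bits each) marks every joint, and bin_to_chunks expands such a bit state into the SEARCH_STATE group array that display_segmentation consumes. The snippet below is a minimal standalone sketch of that kind of decoding, assuming the two halves combine into one 64-bit mask and that a set bit at joint i means "split after chunk i"; the names and the exact bit convention are illustrative, not Tesseract's actual bin_to_chunks.

#include <cstdint>
#include <cstdio>
#include <vector>

// Illustrative only: expand a 64-bit joint mask into chunk-group widths.
// Assumes bit i == 1 means "split after chunk i"; the real
// bin_to_chunks()/SEARCH_STATE layout may use a different convention.
static std::vector<int> mask_to_chunk_widths(std::uint64_t joint_mask,
                                             int num_joints) {
  std::vector<int> widths;
  int width = 1;                          // every group holds at least one chunk
  for (int joint = 0; joint < num_joints; ++joint) {
    if (joint_mask & (1ULL << joint)) {   // split here: close the current group
      widths.push_back(width);
      width = 1;
    } else {                              // keep joined: grow the current group
      ++width;
    }
  }
  widths.push_back(width);                // last group runs to the final chunk
  return widths;
}

int main() {
  // Assumption: part1 would be the high 32 bits, part2 the low 32 bits.
  std::uint64_t mask = (static_cast<std::uint64_t>(0x0u) << 32) | 0x0Au;
  // 5 joints, splits after chunks 1 and 3 -> groups of 2, 2 and 2 chunks.
  for (int w : mask_to_chunk_widths(mask, 5)) std::printf("%d ", w);
  std::printf("\n");
  return 0;
}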
Example #2
BLOB_CHOICE_LIST *Wordrec::classify_piece(TBLOB *pieces,
                                          const DENORM& denorm,
                                          SEAMS seams,
                                          inT16 start,
                                          inT16 end,
                                          BlamerBundle *blamer_bundle) {
  BLOB_CHOICE_LIST *choices;
  TBLOB *blob;
  inT16 x;

  join_pieces(pieces, seams, start, end);
  for (blob = pieces, x = 0; x < start; x++) {
    blob = blob->next;
  }
  choices = classify_blob(blob, denorm, "pieces:", White, blamer_bundle);

  break_pieces(blob, seams, start, end);
#ifndef GRAPHICS_DISABLED
  if (wordrec_display_segmentations > 2) {
    STATE current_state;
    SEARCH_STATE chunk_groups;
    set_n_ones (&current_state, array_count(seams));
    chunk_groups = bin_to_chunks(&current_state, array_count(seams));
    display_segmentation(pieces, chunk_groups);
    window_wait(segm_window);
    memfree(chunk_groups);
  }
#endif

  return (choices);
}
BLOB_CHOICE_LIST *Wordrec::classify_piece(TBLOB *pieces,
                                          SEAMS seams,
                                          inT16 start,
                                          inT16 end) {
  STATE current_state;
  BLOB_CHOICE_LIST *choices;
  TBLOB *pblob;
  TBLOB *blob;
  TBLOB *nblob;
  inT16 x;
  SEARCH_STATE chunk_groups;

  set_n_ones (&current_state, array_count (seams));

  join_pieces(pieces, seams, start, end);
  for (blob = pieces, pblob = NULL, x = 0; x < start; x++) {
    pblob = blob;
    blob = blob->next;
  }
  for (nblob = blob->next; x < end; x++)
    nblob = nblob->next;
  choices = classify_blob (pblob, blob, nblob, NULL, "pieces:", White);

  break_pieces(blob, seams, start, end);
#ifndef GRAPHICS_DISABLED
  if (wordrec_display_segmentations > 2) {
    chunk_groups = bin_to_chunks (&current_state, array_count (seams));
    display_segmentation(pieces, chunk_groups);
    window_wait(segm_window);
    memfree(chunk_groups);
  }
#endif

  return (choices);
}
/**
 * @name evaluate_state
 *
 * Evaluate the segmentation that is represented by this state in the
 * best first search.  Add this state to the "states_seen" list.
 */
inT16 Wordrec::evaluate_state(CHUNKS_RECORD *chunks_record,
                              SEARCH_RECORD *the_search,
                              DANGERR *fixpt) {
  BLOB_CHOICE_LIST_VECTOR *char_choices;
  SEARCH_STATE chunk_groups;
  float rating_limit = the_search->best_choice->rating();
  inT16 keep_going = TRUE;
  PIECES_STATE widths;

  the_search->num_states++;
  chunk_groups = bin_to_chunks(the_search->this_state,
                               the_search->num_joints);
  bin_to_pieces (the_search->this_state, the_search->num_joints, widths);
  getDict().LogNewSegmentation(widths);

  char_choices = evaluate_chunks(chunks_record, chunk_groups);
  wordseg_rating_adjust_factor = -1.0f;
  if (char_choices != NULL && char_choices->length() > 0) {
    // Compute the segmentation cost and include the cost in word rating.
    // TODO(dsl): We should change the SEARCH_RECORD to store this cost
    // from state evaluation and avoid recomputing it here.
    prioritize_state(chunks_record, the_search);
    wordseg_rating_adjust_factor = the_search->segcost_bias;
    getDict().permute_characters(*char_choices, rating_limit,
                                 the_search->best_choice,
                                 the_search->raw_choice);
    bool replaced = false;
    if (getDict().AcceptableChoice(char_choices, the_search->best_choice,
                                   *(the_search->raw_choice), fixpt,
                                   ASSOCIATOR_CALLER, &replaced)) {
      keep_going = FALSE;
    }
  }
  wordseg_rating_adjust_factor = -1.0f;

#ifndef GRAPHICS_DISABLED
  if (wordrec_display_segmentations) {
    display_segmentation (chunks_record->chunks, chunk_groups);
    if (wordrec_display_segmentations > 1)
      window_wait(segm_window);
  }
#endif

  if (rating_limit != the_search->best_choice->rating()) {
    the_search->before_best = the_search->num_states;
    the_search->best_state->part1 = the_search->this_state->part1;
    the_search->best_state->part2 = the_search->this_state->part2;
    replace_char_widths(chunks_record, chunk_groups);
  }
  else if (char_choices != NULL)
    fixpt->index = -1;

  if (char_choices != NULL) delete char_choices;
  memfree(chunk_groups);

  return (keep_going);
}
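
evaluate_state is driven by the best-first segmentation search mentioned in its header comment: the caller keeps a priority queue of untried states, pops the most promising one, evaluates it, and stops as soon as an acceptable choice is found. The loop below is a self-contained toy of that control flow only; the state type, priority function, and neighbour expansion are stand-ins rather than Wordrec's actual SEARCH_RECORD machinery.

#include <cstdint>
#include <cstdio>
#include <functional>
#include <queue>
#include <set>
#include <utility>
#include <vector>

// Toy stand-ins: a "state" is just a joint bitmask and its priority a fake
// cost; in Wordrec the real scoring happens in prioritize_state() and the
// real evaluation in evaluate_state().
using ToyState = std::uint64_t;

static double toy_priority(ToyState s) {
  int bits = 0;
  for (ToyState m = s; m != 0; m >>= 1) bits += static_cast<int>(m & 1);
  return bits;                                    // more splits -> higher cost
}

static bool toy_evaluate(ToyState s) {            // returns keep_going
  std::printf("evaluating state %llx\n", static_cast<unsigned long long>(s));
  return s != 0x5;                                // pretend 0x5 is acceptable
}

int main() {
  const int num_joints = 3;
  using Entry = std::pair<double, ToyState>;      // (priority, state)
  std::priority_queue<Entry, std::vector<Entry>, std::greater<Entry> > open_states;
  std::set<ToyState> states_seen;                 // mirrors the "states_seen" list

  open_states.push(Entry(0.0, 0));
  while (!open_states.empty()) {
    ToyState state = open_states.top().second;
    open_states.pop();
    if (!states_seen.insert(state).second) continue;  // already evaluated
    if (!toy_evaluate(state)) break;              // acceptable choice found
    // Expand: flip each joint bit once to generate neighbouring states.
    for (int j = 0; j < num_joints; ++j) {
      ToyState next = state ^ (1ULL << j);
      open_states.push(Entry(toy_priority(next), next));
    }
  }
  return 0;
}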
/**********************************************************************
 * classify_piece
 *
 * Create a larger piece from a collection of smaller ones.  Classify
 * it and return the results.  Take the large piece apart to leave
 * the collection of small pieces unmodified.
 **********************************************************************/
CHOICES classify_piece(TBLOB *pieces,
                       SEAMS seams,
                       INT16 start,
                       INT16 end,
                       INT32 fx,
                       STATE *this_state,
                       STATE *best_state,
                       INT32 pass,
                       INT32 blob_index) {
  STATE current_state;
  CHOICES choices;
  TBLOB *pblob;
  TBLOB *blob;
  TBLOB *nblob;
  INT16 x;
  SEARCH_STATE chunk_groups;

  set_n_ones (&current_state, array_count (seams));

  join_pieces(pieces, seams, start, end); 
  for (blob = pieces, pblob = NULL, x = 0; x < start; x++) {
    pblob = blob;
    blob = blob->next;
  }
  for (nblob = blob->next; x < end; x++)
    nblob = nblob->next;
  choices = classify_blob (pblob, blob, nblob, NULL, fx, "pieces:", White,
    this_state, best_state, pass, blob_index);

  break_pieces(blob, seams, start, end); 
#ifndef GRAPHICS_DISABLED
  if (display_segmentations > 2) {
    chunk_groups = bin_to_chunks (&current_state, array_count (seams));
    display_segmentation(pieces, chunk_groups); 
    window_wait(segm_window); 
    memfree(chunk_groups); 
  }
#endif

  return (choices);
}
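
All three classify_piece variants follow the discipline the header comment describes: join the small pieces into one blob, classify it, then break it apart so the caller's collection is left unmodified. A scope guard makes that restore step hard to skip on an early return; the sketch below is a hypothetical illustration of that pattern, not Tesseract code, and PieceJoinGuard plus the printf stand-ins for join_pieces()/break_pieces()/classify_blob() are invented names.

#include <cstdio>
#include <functional>

// Hypothetical sketch (not Tesseract code): a scope guard around the
// join/classify/break pattern in classify_piece, guaranteeing that the
// pieces are taken apart again even if classification returns early.
class PieceJoinGuard {
 public:
  explicit PieceJoinGuard(std::function<void()> break_fn)
      : break_fn_(std::move(break_fn)) {}
  ~PieceJoinGuard() { break_fn_(); }   // always runs: mirrors break_pieces()
 private:
  std::function<void()> break_fn_;
};

int main() {
  std::printf("join_pieces()\n");      // stand-in for joining chunks start..end
  {
    PieceJoinGuard guard([] { std::printf("break_pieces()\n"); });
    std::printf("classify_blob()\n");  // classification of the joined piece
  }                                    // guard restores the small pieces here
  return 0;
}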
Example #6
/**
 * rebuild_current_state
 *
 * Transfers the given state to the word's output fields: rebuild_word,
 * best_state, box_word, and returns the corresponding blob choices.
 */
BLOB_CHOICE_LIST_VECTOR *Wordrec::rebuild_current_state(
    WERD_RES *word,
    STATE *state,
    BLOB_CHOICE_LIST_VECTOR *old_choices,
    MATRIX *ratings) {
  // Initialize search_state, num_joints, x, y.
  int num_joints = array_count(word->seam_array);
#ifndef GRAPHICS_DISABLED
    if (wordrec_display_segmentations) {
      print_state("Rebuilding state", state, num_joints);
    }
#endif
  // Setup the rebuild_word ready for the output blobs.
  if (word->rebuild_word != NULL)
    delete word->rebuild_word;
  word->rebuild_word = new TWERD;
  // Setup the best_state.
  word->best_state.clear();
  SEARCH_STATE search_state = bin_to_chunks(state, num_joints);
  // See the "Which index is which?" comment below for what x and y mean.
  int x = 0;
  int y;
  for (int i = 1; i <= search_state[0]; i++) {
    y = x + search_state[i];
    x = y + 1;
  }
  y = count_blobs(word->chopped_word->blobs) - 1;

  // Initialize char_choices, expanded_fragment_lengths:
  // e.g. if fragment_lengths = {1 1 2 3 1},
  // expanded_fragment_lengths_str = {1 1 2 2 3 3 3 1}.
  BLOB_CHOICE_LIST_VECTOR *char_choices = new BLOB_CHOICE_LIST_VECTOR();
  STRING expanded_fragment_lengths_str = "";
  bool state_has_fragments = false;
  const char *fragment_lengths = NULL;

  if (word->best_choice->length() > 0) {
    fragment_lengths = word->best_choice->fragment_lengths();
  }
  if (fragment_lengths) {
    for (int i = 0; i < word->best_choice->length(); ++i) {
      *char_choices += NULL;
      word->best_state.push_back(0);
      if (fragment_lengths[i] > 1) {
        state_has_fragments = true;
      }
      for (int j = 0; j < fragment_lengths[i]; ++j) {
        expanded_fragment_lengths_str += fragment_lengths[i];
      }
    }
  } else {
    for (int i = 0; i <= search_state[0]; ++i) {
      expanded_fragment_lengths_str += (char)1;
      *char_choices += NULL;
      word->best_state.push_back(0);
    }
  }

  // Set up variables for concatenating fragments.
  const char *word_lengths_ptr = NULL;
  const char *word_ptr = NULL;
  if (state_has_fragments) {
    // Make word_lengths_ptr point to the last element in
    // best_choice->unichar_lengths().
    word_lengths_ptr = word->best_choice->unichar_lengths().string();
    word_lengths_ptr += (strlen(word_lengths_ptr)-1);
    // Make word_ptr point to the beginning of the last
    // unichar in best_choice->unichar_string().
    word_ptr = word->best_choice->unichar_string().string();
    word_ptr += (strlen(word_ptr)-*word_lengths_ptr);
  }
  const char *expanded_fragment_lengths =
    expanded_fragment_lengths_str.string();
  char unichar[UNICHAR_LEN + 1];

  // Populate char_choices list such that it corresponds to search_state.
  //
  // If we are rebuilding a state that contains character fragments:
  // -- combine blobs that belong to character fragments
  // -- re-classify the blobs to obtain choices list for the merged blob
  // -- ensure that correct classification appears in the new choices list
  //    NOTE: a choice composed from original fragment choices will always be
  //    added to the new choices list for each character composed from
  //    fragments (even if the choice for the corresponding character appears
  //    in the re-classified choices list for the newly merged blob).
  int ss_index = search_state[0];
  // Which index is which?
  // char_choices_index refers to the finished product: there is one for each
  // blob/unicharset entry in the final word.
  // ss_index refers to the search_state, and indexes a group (chunk) of blobs
  // that were classified together for the best state.
  // old_choice_index is a copy of ss_index, and accesses the old_choices,
  // which correspond to chunks in the best state. old_choice_index gets
  // set to -1 on a fragment set, as there is no corresponding chunk in
  // the best state.
  // x and y refer to the underlying blobs and are the first and last blob
  // indices in a chunk.
  for (int char_choices_index = char_choices->length() - 1;
       char_choices_index >= 0;
       --char_choices_index) {
    // The start and end of the blob to rebuild.
    int true_x = x;
    int true_y = y;
    // The fake merged fragment choice.
    BLOB_CHOICE* merged_choice = NULL;
    // Test for and combine fragments first.
    int fragment_pieces = expanded_fragment_lengths[ss_index];
    int old_choice_index = ss_index;

    if (fragment_pieces > 1) {
      strncpy(unichar, word_ptr, *word_lengths_ptr);
      unichar[*word_lengths_ptr] = '\0';
      merged_choice = rebuild_fragments(unichar, expanded_fragment_lengths,
                                        old_choice_index, old_choices);
      old_choice_index = -1;
    }
    while (fragment_pieces > 0) {
      true_x = x;
      // Move left to the previous blob.
      y = x - 1;
      x = y - search_state[ss_index--];
      --fragment_pieces;
    }
    word->best_state[char_choices_index] = true_y + 1 - true_x;
    BLOB_CHOICE_LIST *current_choices = join_blobs_and_classify(
        word, true_x, true_y, old_choice_index, ratings, old_choices);
    if (merged_choice != NULL) {
      // Insert merged_blob into current_choices, such that current_choices
      // are still sorted in non-descending order by rating.
      ASSERT_HOST(!current_choices->empty());
      BLOB_CHOICE_IT choice_it(current_choices);
      for (choice_it.mark_cycle_pt(); !choice_it.cycled_list() &&
           merged_choice->rating() > choice_it.data()->rating();
           choice_it.forward())
        choice_it.add_before_stay_put(merged_choice);
    }
    // Get rid of fragments in current_choices.
    BLOB_CHOICE_IT choice_it(current_choices);
    for (choice_it.mark_cycle_pt(); !choice_it.cycled_list();
        choice_it.forward()) {
      if (getDict().getUnicharset().get_fragment(
          choice_it.data()->unichar_id())) {
        delete choice_it.extract();
      }
    }
    char_choices->set(current_choices, char_choices_index);

    // Update word_ptr and word_lengths_ptr.
    if (word_lengths_ptr != NULL && word_ptr != NULL) {
      word_lengths_ptr--;
      word_ptr -= (*word_lengths_ptr);
    }
  }
  old_choices->delete_data_pointers();
  delete old_choices;
  memfree(search_state);

  return char_choices;
}
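
The fragment-length expansion described in the comment inside rebuild_current_state (e.g. {1 1 2 3 1} becoming {1 1 2 2 3 3 3 1}) is easy to reproduce standalone. This sketch uses a plain std::vector<int> instead of Tesseract's STRING, purely to make the mapping concrete.

#include <cstdio>
#include <vector>

// Standalone illustration of the fragment-length expansion used by
// rebuild_current_state: each entry n of fragment_lengths is repeated n
// times, so every character fragment gets its own slot.
static std::vector<int> expand_fragment_lengths(const std::vector<int>& lengths) {
  std::vector<int> expanded;
  for (int n : lengths) {
    for (int j = 0; j < n; ++j) expanded.push_back(n);
  }
  return expanded;
}

int main() {
  // {1 1 2 3 1} -> {1 1 2 2 3 3 3 1}, matching the comment's example.
  for (int n : expand_fragment_lengths({1, 1, 2, 3, 1})) std::printf("%d ", n);
  std::printf("\n");
  return 0;
}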
Example #7
/**
 * @name evaluate_state
 *
 * Evaluate the segmentation that is represented by this state in the
 * best first search.  Add this state to the "states_seen" list.
 */
inT16 Wordrec::evaluate_state(CHUNKS_RECORD *chunks_record,
                              SEARCH_RECORD *the_search,
                              DANGERR *fixpt,
                              BlamerBundle *blamer_bundle) {
  BLOB_CHOICE_LIST_VECTOR *char_choices;
  SEARCH_STATE chunk_groups;
  float rating_limit = the_search->best_choice->rating();
  bool keep_going = true;
  PIECES_STATE widths;

  the_search->num_states++;
  chunk_groups = bin_to_chunks(the_search->this_state,
                               the_search->num_joints);
  bin_to_pieces (the_search->this_state, the_search->num_joints, widths);
  if (wordrec_debug_level > 1) {
    log_state("Evaluating state", the_search->num_joints,
              the_search->this_state);
  }
  getDict().LogNewSegmentation(widths);

  char_choices = evaluate_chunks(chunks_record, chunk_groups, blamer_bundle);
  getDict().SetWordsegRatingAdjustFactor(-1.0f);
  bool updated_best_choice = false;
  if (char_choices != NULL && char_choices->length() > 0) {
    // Compute the segmentation cost and include the cost in word rating.
    // TODO(dsl): We should change the SEARCH_RECORD to store this cost
    // from state evaluation and avoid recomputing it here.
    prioritize_state(chunks_record, the_search);
    getDict().SetWordsegRatingAdjustFactor(the_search->segcost_bias);
    updated_best_choice =
      getDict().permute_characters(*char_choices,
                                   the_search->best_choice,
                                   the_search->raw_choice);
    bool replaced = false;
    if (updated_best_choice) {
      if (getDict().AcceptableChoice(char_choices, the_search->best_choice,
                                     NULL, ASSOCIATOR_CALLER, &replaced)) {
        keep_going = false;
      }
      CopyCharChoices(*char_choices, the_search->best_char_choices);
    }
  }
  getDict().SetWordsegRatingAdjustFactor(-1.0f);

#ifndef GRAPHICS_DISABLED
  if (wordrec_display_segmentations) {
    display_segmentation (chunks_record->chunks, chunk_groups);
    if (wordrec_display_segmentations > 1)
      window_wait(segm_window);
  }
#endif

  if (rating_limit != the_search->best_choice->rating()) {
    ASSERT_HOST(updated_best_choice);
    the_search->before_best = the_search->num_states;
    the_search->best_state->part1 = the_search->this_state->part1;
    the_search->best_state->part2 = the_search->this_state->part2;
    replace_char_widths(chunks_record, chunk_groups);
  } else {
    ASSERT_HOST(!updated_best_choice);
    if (char_choices != NULL) fixpt->clear();
  }

  if (char_choices != NULL) delete char_choices;
  memfree(chunk_groups);

  return (keep_going);
}
/**
 * rebuild_current_state
 *
 * Rebuild the blob choices for the segmentation represented by this state,
 * merging and re-classifying any blobs that belong to character fragments.
 */
BLOB_CHOICE_LIST_VECTOR *Wordrec::rebuild_current_state(
    TBLOB *blobs,
    SEAMS seam_list,
    STATE *state,
    BLOB_CHOICE_LIST_VECTOR *old_choices,
    int fx,
    bool force_rebuild,
    const WERD_CHOICE &best_choice,
    const MATRIX *ratings) {
  // Initialize search_state, num_joints, x, y.
  int num_joints = array_count(seam_list);
#ifndef GRAPHICS_DISABLED
    if (wordrec_display_segmentations) {
      print_state("Rebuiling state", state, num_joints);
    }
#endif
  SEARCH_STATE search_state = bin_to_chunks(state, num_joints);
  int x = 0;
  int y;
  int i;
  for (i = 1; i <= search_state[0]; i++) {
    y = x + search_state[i];
    x = y + 1;
  }
  y = count_blobs (blobs) - 1;

  // Initialize char_choices, expanded_fragment_lengths:
  // e.g. if fragment_lengths = {1 1 2 3 1},
  // expanded_fragment_lengths_str = {1 1 2 2 3 3 3 1}.
  BLOB_CHOICE_LIST_VECTOR *char_choices = new BLOB_CHOICE_LIST_VECTOR();
  STRING expanded_fragment_lengths_str = "";
  bool state_has_fragments = false;
  const char *fragment_lengths = NULL;

  if (best_choice.length() > 0) {
    fragment_lengths = best_choice.fragment_lengths();
  }
  if (fragment_lengths) {
    for (int i = 0; i < best_choice.length(); ++i) {
      *char_choices += NULL;
      if (fragment_lengths[i] > 1) {
        state_has_fragments = true;
      }
      for (int j = 0; j < fragment_lengths[i]; ++j) {
        expanded_fragment_lengths_str += fragment_lengths[i];
      }
    }
  } else {
    for (i = 0; i <= search_state[0]; ++i) {
      expanded_fragment_lengths_str += (char)1;
      *char_choices += NULL;
    }
  }

  // Finish early if force_rebuild is false and there are no fragments to merge.
  if (!force_rebuild && !state_has_fragments) {
    delete char_choices;
    memfree(search_state);
    return old_choices;
  }

  // Set up variables for concatenating fragments.
  const char *word_lengths_ptr = NULL;
  const char *word_ptr = NULL;
  if (state_has_fragments) {
    // Make word_lengths_ptr point to the last element in
    // best_choice->unichar_lengths().
    word_lengths_ptr = best_choice.unichar_lengths().string();
    word_lengths_ptr += (strlen(word_lengths_ptr)-1);
    // Make word_ptr point to the beginning of the last
    // unichar in best_choice->unichar_string().
    word_ptr = best_choice.unichar_string().string();
    word_ptr += (strlen(word_ptr)-*word_lengths_ptr);
  }
  const char *expanded_fragment_lengths =
    expanded_fragment_lengths_str.string();
  bool merging_fragment = false;
  int true_y = -1;
  char unichar[UNICHAR_LEN + 1];
  int fragment_pieces = -1;
  float rating = 0.0;
  float certainty = -MAX_FLOAT32;

  // Populate char_choices list such that it corresponds to search_state.
  //
  // If we are rebuilding a state that contains character fragments:
  // -- combine blobs that belong to character fragments
  // -- re-classify the blobs to obtain choices list for the merged blob
  // -- ensure that correct classification appears in the new choices list
  //    NOTE: a choice composed from original fragment choices will always be
  //    added to the new choices list for each character composed from
  //    fragments (even if the choice for the corresponding character appears
  //    in the re-classified choices list for the newly merged blob).
  BLOB_CHOICE_IT temp_it;
  int char_choices_index = char_choices->length() - 1;
  for (i = search_state[0]; i >= 0; i--) {
    BLOB_CHOICE_LIST *current_choices = join_blobs_and_classify(
        blobs, seam_list, x, y, fx, ratings, old_choices);
    // Combine character fragments.
    if (expanded_fragment_lengths[i] > 1) {
      // Start merging character fragments.
      if (!merging_fragment) {
        merging_fragment = true;
        true_y = y;
        fragment_pieces = expanded_fragment_lengths[i];
        rating = 0.0;
        certainty = -MAX_FLOAT32;
        strncpy(unichar, word_ptr, *word_lengths_ptr);
        unichar[*word_lengths_ptr] = '\0';
      }
      // Take into account the fact that we could have joined pieces
      // since we first recorded the ending point of a fragment (true_y).
      true_y -= y - x;
      // Populate fragment with updated values and look for the
      // fragment with the same values in current_choices.
      // Update rating and certainty of the character being composed.
      fragment_pieces--;
      CHAR_FRAGMENT fragment;
      fragment.set_all(unichar, fragment_pieces,
                       expanded_fragment_lengths[i]);
      temp_it.set_to_list(current_choices);
      for (temp_it.mark_cycle_pt(); !temp_it.cycled_list();
           temp_it.forward()) {
        const CHAR_FRAGMENT *current_fragment =
          getDict().getUnicharset().get_fragment(temp_it.data()->unichar_id());
        if (current_fragment && fragment.equals(current_fragment)) {
          rating += temp_it.data()->rating();
          if (temp_it.data()->certainty() > certainty) {
            certainty = temp_it.data()->certainty();
          }
          break;
        }
      }
      assert(!temp_it.cycled_list());  // make sure we found the fragment
      // Free current_choices for the fragmented character.
      delete current_choices;

      // Finish composing character from fragments.
      if (fragment_pieces == 0) {
        // Populate current_choices with the classification of
        // the blob merged from blobs of each character fragment.
        current_choices = join_blobs_and_classify(blobs, seam_list, x,
                                                  true_y, fx, ratings, NULL);
        BLOB_CHOICE *merged_choice =
          new BLOB_CHOICE(getDict().getUnicharset().unichar_to_id(unichar),
                          rating, certainty, 0, NO_PERM);

        // Insert merged_blob into current_choices, such that current_choices
        // are still sorted in non-descending order by rating.
        ASSERT_HOST(!current_choices->empty());
        temp_it.set_to_list(current_choices);
        for (temp_it.mark_cycle_pt();
             !temp_it.cycled_list() &&
             merged_choice->rating() > temp_it.data()->rating();
             temp_it.forward());
        temp_it.add_before_stay_put(merged_choice);

        // Done merging this fragmented character.
        merging_fragment = false;
      }
    }
    if (!merging_fragment) {
      // Get rid of fragments in current_choices.
      temp_it.set_to_list(current_choices);
      for (temp_it.mark_cycle_pt(); !temp_it.cycled_list();
           temp_it.forward()) {
        if (getDict().getUnicharset().get_fragment(
            temp_it.data()->unichar_id())) {
          delete temp_it.extract();
        }
      }
      char_choices->set(current_choices, char_choices_index);
      char_choices_index--;

      // Update word_ptr and word_lengths_ptr.
      if (word_lengths_ptr != NULL && word_ptr != NULL) {
        word_lengths_ptr--;
        word_ptr -= (*word_lengths_ptr);
      }
    }
    y = x - 1;
    x = y - search_state[i];
  }
  old_choices->delete_data_pointers();
  delete old_choices;
  memfree(search_state);

  return (char_choices);
}
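
The merged_choice insertion above advances past every entry whose rating is not larger and inserts just before the first one that is, keeping current_choices sorted in non-descending order by rating. The standalone sketch below shows the same insertion discipline on a plain std::list of (unichar, rating) pairs; ToyChoice and insert_sorted_by_rating are stand-ins for BLOB_CHOICE and the BLOB_CHOICE_IT loop, not Tesseract's API.

#include <cstdio>
#include <list>
#include <string>
#include <utility>

// Stand-in for a BLOB_CHOICE: (unichar, rating). Lower rating = better.
using ToyChoice = std::pair<std::string, float>;

// Insert `merged` just before the first entry with a higher rating, so the
// list stays sorted in non-descending order by rating (same discipline as
// the merged_choice insertion in rebuild_current_state).
static void insert_sorted_by_rating(std::list<ToyChoice>* choices,
                                    const ToyChoice& merged) {
  std::list<ToyChoice>::iterator it = choices->begin();
  while (it != choices->end() && merged.second > it->second) ++it;
  choices->insert(it, merged);
}

int main() {
  std::list<ToyChoice> choices = {{"c", 1.0f}, {"e", 2.5f}, {"o", 4.0f}};
  insert_sorted_by_rating(&choices, {"a", 3.0f});   // lands between e and o
  for (const ToyChoice& c : choices)
    std::printf("%s %.1f\n", c.first.c_str(), c.second);
  return 0;
}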