Example #1
// Gathers outline points and their directions from start_index into dirs by
// stepping along the outline and normalizing the coordinates until the
// required feature_length has been collected or end_index is reached.
// On input pos must point to the position corresponding to start_index and on
// return pos is updated to the current raw position, and pos_normed is set to
// the normed version of pos.
// Since directions wrap around, they need special treatment to get the mean.
// Provided the cluster of directions doesn't straddle the wrap-around point,
// the simple mean works. If they do, then, unless the directions are wildly
// varying, the cluster rotated by 180 degrees will not straddle the wrap-
// around point, so mean(dir + 180 degrees) - 180 degrees will work. Since
// LLSQ conveniently stores the mean of 2 variables, we use it to store
// dir and dir+128 (128 is 180 degrees) and then use the resulting mean
// with the least variance.
static int GatherPoints(const C_OUTLINE* outline, double feature_length,
                        const DENORM& denorm, const DENORM* root_denorm,
                        int start_index, int end_index,
                        ICOORD* pos, FCOORD* pos_normed,
                        LLSQ* points, LLSQ* dirs) {
  int step_length = outline->pathlength();
  ICOORD step = outline->step(start_index % step_length);
  // Prev_normed is the start point of this collection and will be set on the
  // first iteration, and on later iterations used to determine the length
  // that has been collected.
  FCOORD prev_normed;
  points->clear();
  dirs->clear();
  int num_points = 0;
  int index;
  for (index = start_index; index <= end_index; ++index, *pos += step) {
    step = outline->step(index % step_length);
    int edge_weight = outline->edge_strength_at_index(index % step_length);
    if (edge_weight == 0) {
      // This point has conflicting gradient and step direction, so ignore it.
      continue;
    }
    // Get the sub-pixel precise location and normalize.
    FCOORD f_pos = outline->sub_pixel_pos_at_index(*pos, index % step_length);
    denorm.NormTransform(root_denorm, f_pos, pos_normed);
    if (num_points == 0) {
      // The start of this segment.
      prev_normed = *pos_normed;
    } else {
      FCOORD offset = *pos_normed - prev_normed;
      float length = offset.length();
      if (length > feature_length) {
        // We have gone far enough from the start. We will use this point in
        // the next set so return what we have so far.
        return index;
      }
    }
    points->add(pos_normed->x(), pos_normed->y(), edge_weight);
    int direction = outline->direction_at_index(index % step_length);
    if (direction >= 0) {
      direction = NormalizeDirection(direction, f_pos, denorm, root_denorm);
      // Use both the direction and direction +128 so we are not trying to
      // take the mean of something straddling the wrap-around point.
      dirs->add(direction, Modulo(direction + 128, 256));
    }
    ++num_points;
  }
  return index;
}
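The header comment's trick for averaging wrap-around directions can be shown in isolation. Below is a minimal, self-contained sketch of that idea; it does not use Tesseract's LLSQ, and `CircularMean256` is a hypothetical helper written only to illustrate the "mean of dir and of dir+128, keep the one with least variance" approach.

#include <cmath>
#include <cstdio>
#include <vector>

// Circular mean of 8-bit directions (0..255 maps to 0..360 degrees) using
// the two-candidate trick from GatherPoints: accumulate stats for dir and
// for (dir + 128) % 256, keep whichever set has the lower variance, and
// undo the 128 shift if the rotated set won.
static double CircularMean256(const std::vector<int>& dirs) {
  double sum = 0.0, sum_sq = 0.0;          // stats of dir
  double rot_sum = 0.0, rot_sum_sq = 0.0;  // stats of (dir + 128) % 256
  for (int dir : dirs) {
    int rotated = (dir + 128) % 256;
    sum += dir;
    sum_sq += static_cast<double>(dir) * dir;
    rot_sum += rotated;
    rot_sum_sq += static_cast<double>(rotated) * rotated;
  }
  double n = static_cast<double>(dirs.size());
  double var = sum_sq / n - (sum / n) * (sum / n);
  double rot_var = rot_sum_sq / n - (rot_sum / n) * (rot_sum / n);
  if (var <= rot_var) return sum / n;
  // The rotated cluster was tighter, so its mean is valid; shift it back.
  return std::fmod(rot_sum / n - 128.0 + 256.0, 256.0);
}

int main() {
  // Directions straddling the wrap-around point 0/256; their circular mean
  // is 0, which the plain mean (128 here) gets completely wrong.
  std::vector<int> dirs = {250, 252, 254, 2, 4, 6};
  std::printf("circular mean = %g\n", CircularMean256(dirs));  // prints 0
  return 0;
}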
Example #2
// Fits a line in the given direction to blobs that are close to the given
// target_offset perpendicular displacement from the direction. The fit
// error is allowed to be cheat_allowance worse than the existing fit, and
// will still be used.
// If cheat_allowance > 0, the new fit will be good and replace the current
// fit if it has better fit (with cheat) OR its error is below
// max_baseline_error_ and the old fit is marked bad.
// Otherwise the new fit will only replace the old if it is really better,
// or the old fit is marked bad and the new fit has sufficient points, as
// well as being within the max_baseline_error_.
void BaselineRow::FitConstrainedIfBetter(int debug,
                                         const FCOORD& direction,
                                         double cheat_allowance,
                                         double target_offset) {
  double halfrange = fit_halfrange_ * direction.length();
  double min_dist = target_offset - halfrange;
  double max_dist = target_offset + halfrange;
  ICOORD line_pt;
  double new_error = fitter_.ConstrainedFit(direction, min_dist, max_dist,
                                            debug > 2, &line_pt);
  // Allow cheat_allowance off the new error
  new_error -= cheat_allowance;
  double old_angle = BaselineAngle();
  double new_angle = direction.angle();
  if (debug > 1) {
    tprintf("Constrained error = %g, original = %g",
            new_error, baseline_error_);
    tprintf(" angles = %g, %g, delta=%g vs threshold %g\n",
            old_angle, new_angle,
            new_angle - old_angle, kMaxSkewDeviation);
  }
  bool new_good_baseline = new_error <= max_baseline_error_ &&
      (cheat_allowance > 0.0 || fitter_.SufficientPointsForIndependentFit());
  // The new will replace the old if any are true:
  // 1. the new error is better
  // 2. the old is NOT good, but the new is
  // 3. there is a wild angular difference between them (assuming that the new
  //    is a better guess at the angle.)
  if (new_error <= baseline_error_ ||
      (!good_baseline_ && new_good_baseline) ||
      fabs(new_angle - old_angle) > kMaxSkewDeviation) {
    baseline_error_ = new_error;
    baseline_pt1_ = line_pt;
    baseline_pt2_ = baseline_pt1_ + direction;
    good_baseline_ = new_good_baseline;
    if (debug > 1) {
      tprintf("Replacing with constrained baseline, good = %d\n",
              good_baseline_);
    }
  } else if (debug > 1) {
    tprintf("Keeping old baseline\n");
  }
}
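The replacement rule spelled out in the comment (better error, OR old fit bad while the new one is good, OR a wild angular difference) can be isolated as a pure predicate. The sketch below is an illustration only: `ShouldReplaceBaseline` is a hypothetical standalone function, not part of the BaselineRow API, and the threshold value used in main() is an example, not Tesseract's kMaxSkewDeviation.

#include <cmath>
#include <cstdio>

// Stand-alone restatement of the decision made by FitConstrainedIfBetter:
// the new fit wins if its (cheat-adjusted) error is no worse, or the old
// fit is bad while the new one qualifies as good, or the two angles
// disagree by more than the skew threshold.
static bool ShouldReplaceBaseline(double new_error, double old_error,
                                  bool old_good, bool new_good,
                                  double new_angle, double old_angle,
                                  double max_skew_deviation) {
  return new_error <= old_error ||
         (!old_good && new_good) ||
         std::fabs(new_angle - old_angle) > max_skew_deviation;
}

int main() {
  // Old fit is marked bad and the new fit qualifies as good: the new fit is
  // taken even though its cheat-adjusted error is slightly worse.
  bool replace = ShouldReplaceBaseline(/*new_error=*/1.2, /*old_error=*/1.0,
                                       /*old_good=*/false, /*new_good=*/true,
                                       /*new_angle=*/0.01, /*old_angle=*/0.02,
                                       /*max_skew_deviation=*/0.15);
  std::printf("replace = %d\n", replace);  // prints 1
  return 0;
}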
Example #3
// Computes and returns the displacement of the center of the line
// perpendicular to the given direction.
double BaselineRow::PerpDisp(const FCOORD& direction) const {
  float middle_x = (bounding_box_.left() + bounding_box_.right()) / 2.0f;
  FCOORD middle_pos(middle_x, StraightYAtX(middle_x));
  return direction * middle_pos / direction.length();
}
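The arithmetic here is the signed perpendicular distance of the row's midpoint from the line through the origin along direction: cross(direction, middle_pos) / |direction|. The sketch below spells that out with a plain struct instead of FCOORD; it assumes (as the "perpendicular displacement" comment suggests) that FCOORD's operator* denotes the 2-D cross product and length() the Euclidean norm.

#include <cmath>
#include <cstdio>

// Plain 2-D point used instead of Tesseract's FCOORD for this sketch.
struct Vec2 { double x, y; };

// Signed perpendicular displacement of point p from the line through the
// origin along direction d: cross(d, p) / |d|.
static double PerpDispSketch(const Vec2& d, const Vec2& p) {
  double cross = d.x * p.y - d.y * p.x;
  return cross / std::hypot(d.x, d.y);
}

int main() {
  // Horizontal direction and a point 3 units above the x-axis: the
  // perpendicular displacement is simply the y-coordinate, 3.
  Vec2 direction = {2.0, 0.0};
  Vec2 middle_pos = {5.0, 3.0};
  std::printf("perp disp = %g\n", PerpDispSketch(direction, middle_pos));  // 3
  return 0;
}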