Example #1
/*
 * call-seq:
 *   sub(val[,mask])
 *
 * Returns a new CvScalar containing <i>self[I] - val[I]</i> if <i>val</i> is a CvScalar
 * or compatible object, or a new CvMat containing <i>self - val[I]</i> element-wise if
 * <i>val</i> is a CvMat or its subclass.
 */
VALUE
rb_sub(int argc, VALUE *argv, VALUE self)
{
  VALUE val, mask;
  rb_scan_args(argc, argv, "11", &val, &mask);
  if (rb_obj_is_kind_of(val, cCvMat::rb_class())) {
    CvArr *val_ptr = CVARR(val);
    VALUE dest = Qnil;
    try {
      dest = cCvMat::new_object(cvGetSize(val_ptr), cvGetElemType(val_ptr));
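      // cvSubRS computes dest(I) = scalar - src(I) element-wise, honoring the optional mask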
      cvSubRS(val_ptr, *CVSCALAR(self), CVARR(dest), MASK(mask));
    }
    catch (cv::Exception& e) {
      raise_cverror(e);
    }
    return dest;
  }
  else {
    CvScalar *src = CVSCALAR(self);
    CvScalar scl = VALUE_TO_CVSCALAR(val);
    return new_object(cvScalar(src->val[0] - scl.val[0],
                               src->val[1] - scl.val[1],
                               src->val[2] - scl.val[2],
                               src->val[3] - scl.val[3]));
  }
}
Example #2
/*
 * call-seq:
 *   retrieve -> IplImage or nil
 *
 * Gets the image grabbed with grab.
 */
VALUE
rb_retrieve(VALUE self)
{
  IplImage *frame = cvRetrieveFrame(CVCAPTURE(self));
  if (!frame)
    return Qnil;
  VALUE image = cIplImage::new_object(cvSize(frame->width, frame->height),
                                      CV_MAKETYPE(CV_8U, frame->nChannels));
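  // frames delivered with a bottom-left origin are flipped vertically so the returned image is top-left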
  if (frame->origin == IPL_ORIGIN_TL) {
    cvCopy(frame, CVARR(image));
  } else {
    cvFlip(frame, CVARR(image));
  }
  return image;
}
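For context, a minimal stand-alone sketch of the grab/retrieve pattern this wrapper exposes, assuming the legacy OpenCV C API from <opencv/highgui.h>; the camera index 0 is only an illustrative choice:

#include <opencv/highgui.h>

int main()
{
  CvCapture *capture = cvCreateCameraCapture(0);
  if (!capture)
    return 1;
  if (cvGrabFrame(capture)) {                    // grab a frame from the device
    IplImage *frame = cvRetrieveFrame(capture);  // decode the grabbed frame
    // frame is owned by the capture; copy it (e.g. with cvCloneImage) if it must
    // outlive the next grab or the capture itself
    (void)frame;
  }
  cvReleaseCapture(&capture);
  return 0;
}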
Example #3
/*
 * Determines whether the point is inside a contour, outside, or lies on an edge (or coincides with a vertex).
 * @overload point_polygon_test(point, measure_dist)
 *   @param point [CvPoint2D32f] Point tested against the contour
 *   @param measure_dist [Boolean] If true, the method estimates the signed distance from the point to
 *      the nearest contour edge. Otherwise, the function only checks if the point is inside a contour or not.
 * @return [Number] When measure_dist = false, the return value is +1 (inside), -1 (outside) or 0 (on an edge).
 *   When measure_dist = true, it is the signed distance between the point and the nearest contour edge.
 * @opencv_func cvPointPolygonTest
 */
VALUE
rb_point_polygon_test(VALUE self, VALUE point, VALUE measure_dist)
{
  int measure_dist_flag;

  if (measure_dist == Qtrue)
    measure_dist_flag = 1;
  else if (measure_dist == Qfalse)
    measure_dist_flag = 0;
  else
    measure_dist_flag = NUM2INT(measure_dist);

  double dist = 0;
  try {
    dist = cvPointPolygonTest(CVARR(self), VALUE_TO_CVPOINT2D32F(point), measure_dist_flag);
  }
  catch (cv::Exception& e) {
    raise_cverror(e);
  }
  
  /* cvPointPolygonTest returns 100, -100 or 0 when measure_dist = 0 */
  if ((!measure_dist_flag) && ((int)dist) != 0)
    dist = (dist > 0) ? 1 : -1;

  return rb_float_new(dist);
}
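As a point of reference, here is a minimal stand-alone sketch of the underlying cvPointPolygonTest call, assuming the legacy OpenCV C API; the square contour and test points are arbitrary illustrative values:

#include <opencv/cv.h>
#include <stdio.h>

int main()
{
  // a 10x10 axis-aligned square stored as a 1x4 matrix of integer points
  CvPoint square[] = { cvPoint(0, 0), cvPoint(10, 0), cvPoint(10, 10), cvPoint(0, 10) };
  CvMat contour = cvMat(1, 4, CV_32SC2, square);

  double inside  = cvPointPolygonTest(&contour, cvPoint2D32f(5, 5), 0);   // > 0: inside
  double outside = cvPointPolygonTest(&contour, cvPoint2D32f(20, 5), 0);  // < 0: outside
  double dist    = cvPointPolygonTest(&contour, cvPoint2D32f(5, 5), 1);   // signed distance (5.0 here)

  printf("%.1f %.1f %.1f\n", inside, outside, dist);
  return 0;
}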
Example #4
/*
 * call-seq:
 *   sub(val[,mask])
 *
 * Returns a new CvScalar containing <i>self[I] - val[I]</i> if <i>val</i> is a CvScalar
 * or compatible object, or a new CvMat containing <i>self - val[I]</i> element-wise if
 * <i>val</i> is a CvMat or its subclass.
 */
VALUE
rb_sub(int argc, VALUE *argv, VALUE self)
{
  VALUE val, mask;
  rb_scan_args(argc, argv, "11", &val, &mask);
  if (rb_obj_is_kind_of(val, cCvMat::rb_class())) {
    VALUE dest = cCvMat::new_object(cvGetSize(CVARR(val)), cvGetElemType(CVARR(val)));
    cvSubRS(CVARR(val), *CVSCALAR(self), CVARR(dest), MASK(mask));
    return dest;
  }
  else {
    CvScalar *src = CVSCALAR(self), scl = VALUE_TO_CVSCALAR(val);
    return new_object(cvScalar(src->val[0] - scl.val[0],
                               src->val[1] - scl.val[1],
                               src->val[2] - scl.val[2],
                               src->val[3] - scl.val[3]));
  }
}
Example #5
/*
 * call-seq:
 *   arc_length(<i>[slice = nil][,is_closed = nil]</i>) -> float
 *
 * Calculates contour perimeter or curve length.
 * <i>slice</i> is the starting and ending points of the curve.
 * <i>is_closed</i> indicates whether the curve is closed or not. There are 3 cases:
 * * is_closed = true  - the curve is assumed to be closed.
 * * is_closed = false - the curve is assumed to be unclosed.
 * * is_closed = nil (default) - uses self#close?
 */
VALUE
rb_arc_length(int argc, VALUE *argv, VALUE self)
{
  VALUE slice, is_closed;
  rb_scan_args(argc, argv, "02", &slice, &is_closed);
  return rb_float_new(cvArcLength(CVARR(self),
				  NIL_P(slice) ? CV_WHOLE_SEQ : VALUE_TO_CVSLICE(slice),
				  TRUE_OR_FALSE(is_closed, -1)));
}
Example #6
/*
 * call-seq:
 *   match_shapes(object, method) -> float
 *
 * Compares two shapes (self and object). <i>object</i> should be CvContour.
 *
 * A - object1, B - object2:
 * * method=CV_CONTOURS_MATCH_I1
 *     I1(A,B) = sum(i=1..7) |1/m_i^A - 1/m_i^B|
 * * method=CV_CONTOURS_MATCH_I2
 *     I2(A,B) = sum(i=1..7) |m_i^A - m_i^B|
 * * method=CV_CONTOURS_MATCH_I3
 *     I3(A,B) = sum(i=1..7) |m_i^A - m_i^B| / |m_i^A|
 */
VALUE
rb_match_shapes(int argc, VALUE *argv, VALUE self)
{
  VALUE object, method, param;
  rb_scan_args(argc, argv, "21", &object, &method, &param);
  int method_flag = CVMETHOD("COMPARISON_METHOD", method);
  if (!rb_obj_is_kind_of(object, cCvContour::rb_class()))
    rb_raise(rb_eTypeError, "argument 1 (shape) should be %s",
        rb_class2name(cCvContour::rb_class()));
  double result = 0;
  try {
    result = cvMatchShapes(CVARR(self), CVARR(object), method_flag);
  }
  catch (cv::Exception& e) {
    raise_cverror(e);
  }
  return rb_float_new(result);
}
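For readability, here are the three comparison metrics above written out, with m_i^A and m_i^B denoting the log-scaled Hu moment invariants of shapes A and B (per the OpenCV documentation for cvMatchShapes):

I_1(A,B) = \sum_{i=1}^{7} \left| \frac{1}{m_i^A} - \frac{1}{m_i^B} \right|, \qquad
I_2(A,B) = \sum_{i=1}^{7} \left| m_i^A - m_i^B \right|, \qquad
I_3(A,B) = \sum_{i=1}^{7} \frac{\left| m_i^A - m_i^B \right|}{\left| m_i^A \right|}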
Example #7
/*
 * call-seq:
 *   query -> IplImage or nil
 *
 * Grabs and returns a frame from a camera or file. This is just a combination of grab and retrieve in one call.
 */
VALUE
rb_query(VALUE self)
{
  VALUE image = Qnil;
  try {
    IplImage *frame = cvQueryFrame(CVCAPTURE(self));
    if (!frame)
      return Qnil;
    image = cIplImage::new_object(cvSize(frame->width, frame->height),
				  CV_MAKETYPE(CV_8U, frame->nChannels));
    if (frame->origin == IPL_ORIGIN_TL)
      cvCopy(frame, CVARR(image));
    else
      cvFlip(frame, CVARR(image));
  }
  catch (cv::Exception& e) {
    raise_cverror(e);
  }
  return image;
}
Example #8
/*
 * Calculates the distance between a point and the nearest contour edge.
 * @overload measure_distance(point)
 *   @param point [CvPoint2D32f] Point tested against the contour
 * @return Signed distance between the point and the nearest contour edge
 * @opencv_func cvPointPolygonTest
 */
VALUE
rb_measure_distance(VALUE self, VALUE point)
{
  double distance = 0;
  try {
    distance = cvPointPolygonTest(CVARR(self), VALUE_TO_CVPOINT2D32F(point), 1);
  }
  catch (cv::Exception& e) {
    raise_cverror(e);
  }
  return rb_float_new(distance);
}
Example #9
/*
 * Performs a point-in-contour test.
 * The method determines whether the point is inside a contour, outside,
 * or lies on an edge (or coincides with a vertex).
 * @overload in?(point)
 *   @param point [CvPoint2D32f] Point tested against the contour
 * @return [Boolean] If the point is inside, returns true. If outside, returns false.
 *   If it lies on an edge, returns nil.
 * @opencv_func cvPointPolygonTest
 */
VALUE
rb_in_q(VALUE self, VALUE point)
{
  double n = 0;
  try {
    n = cvPointPolygonTest(CVARR(self), VALUE_TO_CVPOINT2D32F(point), 0);
  }
  catch (cv::Exception& e) {
    raise_cverror(e);
  }
  return n == 0 ? Qnil : n > 0 ? Qtrue : Qfalse;
}
Example #10
/*
 * call-seq:
 *   CvMoments.new(<i>src[,is_binary = nil]</i>)
 *
 * Calculates all moments up to third order of a polygon or rasterized shape.
 * <i>src</i> should be CvMat or CvPolygon.
 *
 * If is_binary = true, all the zero pixel values are treated as zeroes, all the others are treated as 1's.
 */
VALUE
rb_initialize(int argc, VALUE *argv, VALUE self)
{
  VALUE src, is_binary;
  rb_scan_args(argc, argv, "02", &src, &is_binary);
  if (!NIL_P(src)) {
    if (rb_obj_is_kind_of(src, cCvMat::rb_class()) || rb_obj_is_kind_of(src, cCvSeq::rb_class()))
      cvMoments(CVARR(src), CVMOMENTS(self), TRUE_OR_FALSE(is_binary, 0));
    else
      rb_raise(rb_eTypeError, "argument 1 (src) should be %s or %s.",
	       rb_class2name(cCvMat::rb_class()), rb_class2name(cCvSeq::rb_class()));
  }
  return self;
}
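For orientation, a minimal sketch of the wrapped cvMoments call and how the computed moments are typically read back, assuming the legacy OpenCV C API; the centroid computation is only illustrative:

#include <opencv/cv.h>

// compute the centroid of a (roughly binary) single-channel image
void centroid_example(IplImage *binary_img)
{
  CvMoments moments;
  cvMoments(binary_img, &moments, 1);  // is_binary = 1: non-zero pixels count as 1

  double m00 = cvGetSpatialMoment(&moments, 0, 0);  // zeroth moment: area of a binary image
  if (m00 != 0) {
    double cx = cvGetSpatialMoment(&moments, 1, 0) / m00;  // centroid x
    double cy = cvGetSpatialMoment(&moments, 0, 1) / m00;  // centroid y
    (void)cx;
    (void)cy;
  }
}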
Example #11
/*
 * Grabs, decodes and returns the next video frame.
 * @overload query
 * @return [IplImage] Next video frame
 * @return [nil] Failed to read next video frame
 * @opencv_func cvQueryFrame
 */
VALUE
rb_query(VALUE self)
{
  VALUE image = Qnil;
  IplImage *frame = NULL;
  try {
    if (!(frame = cvQueryFrame(CVCAPTURE(self)))) {
      return Qnil;
    }
    image = cIplImage::new_object(frame->width, frame->height,
				  CV_MAKETYPE(IPL2CV_DEPTH(frame->depth), frame->nChannels));
    if (frame->origin == IPL_ORIGIN_TL) {
      cvCopy(frame, CVARR(image));
    }
    else {
      cvFlip(frame, CVARR(image));
    }
  }
  catch (cv::Exception& e) {
    raise_cverror(e);
  }
  return image;
}
Example #12
/*
 * call-seq:
 *   IplImage.smoothness(<i>lowFreqRatio, blankDensity, messyDensity, highFreqRatio</i>) -> [ symbol, float, float ]
 *
 * Determines whether the image's smoothness is :smooth, :messy, or :blank.
 *
 * Original Author: [email protected]
 */
VALUE
rb_smoothness(int argc, VALUE *argv, VALUE self)
{
  VALUE lowFreqRatio, blankDensity, messyDensity, highFreqRatio;
  rb_scan_args(argc, argv, "04", &lowFreqRatio, &blankDensity, &messyDensity, &highFreqRatio);

  double f_lowFreqRatio, f_blankDensity, f_messyDensity, f_highFreqRatio;
  double outLowDensity, outHighDensity;
  if (TYPE(lowFreqRatio) == T_NIL) {
    f_lowFreqRatio = 10 / 128.0f;
  } else {
    Check_Type(lowFreqRatio, T_FLOAT);
    f_lowFreqRatio = NUM2DBL(lowFreqRatio);
  }
  if (TYPE(blankDensity) == T_NIL) {
    f_blankDensity = 1.2f;
  } else {
    Check_Type(blankDensity, T_FLOAT);
    f_blankDensity = NUM2DBL(blankDensity);
  }
  if (TYPE(messyDensity) == T_NIL) {
    f_messyDensity = 0.151f;
  } else {
    Check_Type(messyDensity, T_FLOAT);
    f_messyDensity = NUM2DBL(messyDensity);
  }
  if (TYPE(highFreqRatio) == T_NIL) {
    f_highFreqRatio = 5 / 128.0f;
  } else {
    Check_Type(highFreqRatio, T_FLOAT);
    f_highFreqRatio = NUM2DBL(highFreqRatio);
  }

  IplImage *pFourierImage;
  IplImage *p64DepthImage;

  // the Fourier transform requires a 64-bit floating point (IPL_DEPTH_64F) image
  if (IPLIMAGE(self)->depth == 64) {
    p64DepthImage = NULL;
    pFourierImage = create_fourier_image(IPLIMAGE(self));
  } else {
    p64DepthImage = rb_cvCreateImage(cvGetSize(IPLIMAGE(self)), IPL_DEPTH_64F, 1);
    cvConvertScale(CVARR(self), p64DepthImage, 1.0, 0.0);
    pFourierImage = create_fourier_image(p64DepthImage);
  }

  Smoothness result = compute_smoothness(pFourierImage, f_lowFreqRatio, f_blankDensity, f_messyDensity, f_highFreqRatio, outLowDensity, outHighDensity);

  cvReleaseImage(&pFourierImage);
  if (p64DepthImage != NULL)
    cvReleaseImage(&p64DepthImage);

  switch(result)
  {
    case SMOOTH:
      return rb_ary_new3(3, ID2SYM(rb_intern("smooth")), rb_float_new(outLowDensity), rb_float_new(outHighDensity));
    case MESSY:
      return rb_ary_new3(3, ID2SYM(rb_intern("messy")), rb_float_new(outLowDensity), rb_float_new(outHighDensity));
    case BLANK:
      return rb_ary_new3(3, ID2SYM(rb_intern("blank")), rb_float_new(outLowDensity), rb_float_new(outHighDensity));
    default:
      return rb_ary_new3(3, Qnil, rb_float_new(outLowDensity), rb_float_new(outHighDensity));
  }
}
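A minimal sketch of the depth-conversion step performed above before the Fourier transform, assuming the legacy OpenCV C API and a single-channel source (create_fourier_image, compute_smoothness and rb_cvCreateImage are helpers local to this binding and are not reproduced here):

#include <opencv/cv.h>

// scale an 8-bit single-channel image into a fresh 64-bit float image, since the
// DFT step expects floating-point data; the caller releases it with cvReleaseImage()
IplImage* to_depth_64f(const IplImage *src)
{
  IplImage *dst = cvCreateImage(cvGetSize(src), IPL_DEPTH_64F, 1);
  cvConvertScale(src, dst, 1.0, 0.0);
  return dst;
}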