Example #1
// define a trackbar callback
void on_trackbar( int h )
{
    int j;
    int distStep = dist->widthStep / 4;   // row step in float elements (dist is a 32F image)
    float* currPointer;
    
    cvThreshold( gray, edge,
                 ( float )( edge_thresh ),
                 ( float )( edge_thresh ),
                  CV_THRESH_BINARY );
    //Distance transform                  
    cvDistTransform( edge, dist,
                     CV_DIST_L2,
                     CV_DIST_MASK_5,
                     NULL );

    cvConvertScale( dist, dist, 5000.0, 0 );
    
    // take the square root of every row of the distance map, in place
    for( j = 0, currPointer = (float*)dist->imageData; j < dist->height; j++, currPointer += distStep )
    {
        cvbSqrt( currPointer, currPointer, dist->width );
    }
    
    // build the visualization: dist8u1 = d & 255, dist8u2 = 255 - (d & 255), then merge into a BGR image
    cvConvertScale( dist, dist32s, 1.0, 0.5 );
    cvAndS( dist32s, cvScalarAll(255), dist32s, 0 );
    cvConvertScale( dist32s, dist8u1, 1, 0 );
    cvConvertScale( dist32s, dist32s, -1, 0 );
    cvAddS( dist32s, cvScalarAll(255), dist32s, 0 );
    cvConvertScale( dist32s, dist8u2, 1, 0 );
    cvCvtPlaneToPix( dist8u1, dist8u2, dist8u2, 0, dist8u );
    show_iplimage( wndname, dist8u );
}
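For context, a minimal sketch of how a callback like this is typically registered with the old highgui C API; the window name, the trackbar label and the use of cvCreateTrackbar here are assumptions, not part of the example above (the example displays through its own show_iplimage helper).

// minimal sketch; assumes the globals used by on_trackbar (gray, edge, dist, dist32s,
// dist8u1, dist8u2, dist8u, wndname and an int edge_thresh) have already been created
cvNamedWindow( wndname, 1 );
cvCreateTrackbar( "Threshold", wndname, &edge_thresh, 255, on_trackbar );
on_trackbar( edge_thresh );   // draw the initial result
cvWaitKey( 0 );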
Example #2
static void node_composit_exec_cvAnd(void *data, bNode *node, bNodeStack **in, bNodeStack **out)
{
	CvArr* dst;
	CvArr* src1;
	CvArr* src2;
	CvArr* mask = NULL;
	CompBuf* dst_buf;

	if(out[0]->hasoutput==0) return;
	if(in[0]->data) {
		//Inputs
		src1 = BOCV_IplImage_attach(in[0]->data);
		mask = BOCV_Mask_attach(in[2]->data);

		//Output
		dst_buf = dupalloc_compbuf(in[0]->data);
		dst = BOCV_IplImage_attach(dst_buf);

		//Check Image - Mask sizes
		if(mask) {
			if(!BOCV_checkMask(src1, mask)) {
				node->error = 1;
				return;
			}
		}

		if(in[1]->data) {
			src2 = BOCV_IplImage_attach(in[1]->data);
			//Checks
			//Check Image Sizes
			if(!BOCV_checkAreSameType(src1, src2)) {
				node->error = 1;
				return;
			}
			//Check Image number Channels
			if(!BOCV_checkSameNChannels(src1, src2)) {
				node->error = 1;
				return;
			}
			cvAnd(src1, src2, dst, mask);
			BOCV_IplImage_detach(src2);
		}
		else {
			//No second image connected: AND the first input with a constant scalar
			CvScalar s;
			s.val[0] = in[1]->vec[0];
			s.val[1] = in[1]->vec[1];
			s.val[2] = in[1]->vec[2];
			s.val[3] = 0;
			cvAndS(src1, s, dst, mask);
		}

		out[0]->data = dst_buf;

		BOCV_IplImage_detach(src1);
		BOCV_IplImage_detach(mask);
		BOCV_IplImage_detach(dst);
	}
	
}
Example #3
////////////////////////////////////////////////////////////////////////////////
// [http://stackoverflow.com/questions/4831813/image-segmentation-using-mean-shift-explained]
// "The Mean Shift segmentation is a local homogenization technique that is 
// very useful for damping shading or tonality differences in localized objects. 
// [...] replaces each pixel with the mean of the pixels in a range-r 
// neighborhood and whose value is within a distance d."
static void posterize_image(IplImage* input)
{
  // mask off the three low bits of every channel (0xF8) to reduce the number of colours
  cvAndS( input, cvScalar(0xF8, 0xF8, 0xF8), input);
  
  int colour_radius =  4; // number of colour classes ~= 256/colour_radius
  int pixel_radius  =  2;
  int levels        =  1;
  cvPyrMeanShiftFiltering(input, input, pixel_radius, colour_radius, levels);
  
}
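A minimal usage sketch for the snippet above; the file name and the save call are assumptions (any 8-bit, 3-channel image will do):

IplImage* img = cvLoadImage( "input.jpg", CV_LOAD_IMAGE_COLOR );
if( img )
{
    posterize_image( img );              // quantize the colours, then mean-shift filter
    cvSaveImage( "posterized.jpg", img );
    cvReleaseImage( &img );
}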
Example #4
CvPoint CTransformImage::findCenter()
{
	IplImage* dist8u  = cvCloneImage(m_transImage);
	IplImage* dist32f = cvCreateImage(cvGetSize(m_transImage), IPL_DEPTH_32F, 1);
	IplImage* dist32s = cvCreateImage(cvGetSize(m_transImage), IPL_DEPTH_32S, 1);

	// distance transform mask (weights)
	float mask[3] = {1.f, 1.5f, 0};

	// apply the distance transform
	cvDistTransform(m_transImage, dist32f, CV_DIST_USER, 3, mask, NULL);

	// scale the result so it is visible
	cvConvertScale(dist32f, dist32f, 1000, 0);
	cvPow(dist32f, dist32f, 0.5);

	cvConvertScale(dist32f, dist32s, 1.0, 0.5);
	cvAndS(dist32s, cvScalarAll(255), dist32s, 0);
	cvConvertScale(dist32s, dist8u, 1, 0);

	// find the coordinates of the largest distance value
	int max;
	for(int i = max = 0; i < dist8u->height; ++i)
	{
		int index = i * dist8u->widthStep;
		for(int j = 0; j < dist8u->width; ++j)
		{
			if((unsigned char)dist8u->imageData[index+j] > max)
			{
				max = (unsigned char)dist8u->imageData[index+j];
				m_center.x = j, m_center.y = i;
			}
		}
	}

	cvReleaseImage(&dist8u);
	cvReleaseImage(&dist32f);
	cvReleaseImage(&dist32s);

	if(m_center.x < 0 || m_center.y < 0)
		m_center.x = 0, m_center.y = 0;

	CvBox2D box;
	box.center = cvPoint2D32f(m_center.x, m_center.y);
	box.size   = cvSize2D32f(3, 3);
	box.angle  = 90;
	cvEllipseBox(m_image, box, CV_RGB(255,242,0), 3);

	return m_center;
}
Example #5
static GstFlowReturn gst_gcs_transform_ip(GstBaseTransform * btrans, GstBuffer * gstbuf) 
{
  GstGcs *gcs = GST_GCS (btrans);

  GST_GCS_LOCK (gcs);

  //////////////////////////////////////////////////////////////////////////////
  // get image data from the input, which is RGBA or BGRA
  gcs->pImageRGBA->imageData = (char*)GST_BUFFER_DATA(gstbuf);
  cvSplit(gcs->pImageRGBA,   gcs->pImgCh1, gcs->pImgCh2, gcs->pImgCh3, gcs->pImgChX );
  cvCvtColor(gcs->pImageRGBA,  gcs->pImgRGB, CV_BGRA2BGR);


  //////////////////////////////////////////////////////////////////////////////
  ////////////////////////////////////////////////////////MOTION CUES INTEGR////
  //////////////////////////////////////////////////////////////////////////////

  //////////////////////////////////////////////////////////////////////////////
  // apply step 1. filtering using bilateral filter. Cannot happen in-place => scratch
  cvSmooth(gcs->pImgRGB, gcs->pImgScratch, CV_BILATERAL, 3, 50, 3, 0);
  // create GRAY image
  cvCvtColor(gcs->pImgScratch, gcs->pImgGRAY, CV_BGR2GRAY);

  // Frame difference the GRAY and the previous one
  // (not intuitive: first smooth the frames, then take their difference)
  cvCopy( gcs->pImgGRAY,   gcs->pImgGRAY_copy,  NULL);
  cvCopy( gcs->pImgGRAY_1, gcs->pImgGRAY_1copy, NULL);
  get_frame_difference( gcs->pImgGRAY_copy, gcs->pImgGRAY_1copy, gcs->pImgGRAY_diff);
  cvErode( gcs->pImgGRAY_diff, gcs->pImgGRAY_diff, NULL, 3);
  cvDilate( gcs->pImgGRAY_diff, gcs->pImgGRAY_diff, NULL, 3);


  //////////////////////////////////////////////////////////////////////////////
  //////////////////////////////////////////////////////////////////////////////
  // ghost mapping
  gcs->dstTri[0].x = gcs->facepos.x - gcs->facepos.width/2 ;
  gcs->dstTri[0].y = gcs->facepos.y - gcs->facepos.height/2;
  gcs->dstTri[1].x = gcs->facepos.x - gcs->facepos.width/2;
  gcs->dstTri[1].y = gcs->facepos.y + gcs->facepos.height/2;
  gcs->dstTri[2].x = gcs->facepos.x + gcs->facepos.width/2;
  gcs->dstTri[2].y = gcs->facepos.y + gcs->facepos.height/2;

  if( gcs->ghostfilename){
    cvGetAffineTransform( gcs->srcTri, gcs->dstTri, gcs->warp_mat );
    cvWarpAffine( gcs->cvGhostBwResized, gcs->cvGhostBwAffined, gcs->warp_mat );
  }




  //////////////////////////////////////////////////////////////////////////////
  //////////////////////////////////////////////////////////////////////////////
  // GrabCut algorithm preparation and running

  gcs->facepos.x = gcs->facepos.x - gcs->facepos.width/2;
  gcs->facepos.y = gcs->facepos.y - gcs->facepos.height/2;

  // create an IplImage  with the skin colour pixels as 255
  compose_skin_matrix(gcs->pImgRGB, gcs->pImg_skin);
  // And the skin pixels with the movement mask
  cvAnd( gcs->pImg_skin,  gcs->pImgGRAY_diff,  gcs->pImgGRAY_diff);
  //cvErode( gcs->pImgGRAY_diff, gcs->pImgGRAY_diff, cvCreateStructuringElementEx(5, 5, 3, 3, CV_SHAPE_RECT,NULL), 1);
  // create the structuring elements once and release them further down, so we don't leak one per frame
  IplConvKernel* se7x7 = cvCreateStructuringElementEx(7,7, 5,5, CV_SHAPE_RECT, NULL);
  IplConvKernel* se5x5 = cvCreateStructuringElementEx(5,5, 3,3, CV_SHAPE_RECT, NULL);
  cvDilate(gcs->pImgGRAY_diff, gcs->pImgGRAY_diff, se7x7, 2);
  cvErode( gcs->pImgGRAY_diff, gcs->pImgGRAY_diff, se5x5, 2);

  // if the incoming alpha is all 1's we ignore it: prevents problems when no "vibe" element ran before us
  if((0.75*(gcs->width * gcs->height) <= cvCountNonZero(gcs->pImgChX)))
    cvZero(gcs->pImgChX);
  // OR the input Alpha
  cvOr( gcs->pImgChX,  gcs->pImgGRAY_diff,  gcs->pImgGRAY_diff);


  //////////////////////////////////////////////////////////////////////////////
  // try to consolidate a single mask from all the sub-patches
  cvDilate(gcs->pImgGRAY_diff, gcs->pImgGRAY_diff, se7x7, 3);
  cvErode( gcs->pImgGRAY_diff, gcs->pImgGRAY_diff, se5x5, 4);
  cvReleaseStructuringElement(&se7x7);
  cvReleaseStructuringElement(&se5x5);

  //////////////////////////////////////////////////////////////////////////////
  // use either Ghost or boxes-model to create a PR foreground starting point in gcs->grabcut_mask
  if( gcs->ghostfilename)
    compose_grabcut_seedmatrix3(gcs->grabcut_mask, gcs->cvGhostBwAffined, gcs->pImgGRAY_diff  );
  else{
    // toss it all to the bbox creation function, together with the face position and size
    compose_grabcut_seedmatrix2(gcs->grabcut_mask, gcs->facepos, gcs->pImgGRAY_diff, gcs->facefound );
  }


  //////////////////////////////////////////////////////////////////////////////
#ifdef KMEANS
  gcs->num_clusters = 18; // keep it even to simplify the integer arithmetic
  cvCopy(gcs->pImgRGB, gcs->pImgRGB_kmeans, NULL);
  posterize_image(gcs->pImgRGB_kmeans);
  create_kmeans_clusters(gcs->pImgRGB_kmeans, gcs->kmeans_points, gcs->kmeans_clusters, 
                         gcs->num_clusters, gcs->num_samples);
  adjust_bodybbox_w_clusters(gcs->grabcut_mask, gcs->pImgRGB_kmeans, gcs->num_clusters, gcs->facepos);
#endif //KMEANS


  //////////////////////////////////////////////////////////////////////////////
  if( gcs->debug < 70)
    run_graphcut_iteration( &(gcs->GC), gcs->pImgRGB, gcs->grabcut_mask, &gcs->bbox_prev);



  // get a copy of GRAY for the next iteration
  cvCopy(gcs->pImgGRAY, gcs->pImgGRAY_1, NULL);

  //////////////////////////////////////////////////////////////////////////////
  // if we want to display, just overwrite the output
  if( gcs->display ){
    int outputimage = gcs->debug;
    switch( outputimage ){
    case 1: // output the GRAY difference
      cvCvtColor( gcs->pImgGRAY_diff, gcs->pImgRGB, CV_GRAY2BGR );
      break;
    case 50:// Ghost remapped
      cvCvtColor( gcs->cvGhostBwAffined, gcs->pImgRGB, CV_GRAY2BGR );
      break;
    case 51:// Ghost applied
      cvAnd( gcs->cvGhostBwAffined, gcs->pImgGRAY, gcs->pImgGRAY, NULL );
      cvCvtColor( gcs->pImgGRAY, gcs->pImgRGB, CV_GRAY2BGR );
      break;
    case 60:// Graphcut
      cvAndS(gcs->grabcut_mask, cvScalar(1), gcs->grabcut_mask, NULL);  // get only FG
      cvConvertScale( gcs->grabcut_mask, gcs->grabcut_mask, 127.0);
      cvCvtColor( gcs->grabcut_mask, gcs->pImgRGB, CV_GRAY2BGR );
      break;
    case 61:// Graphcut applied on input/output image
      cvAndS(gcs->grabcut_mask, cvScalar(1), gcs->grabcut_mask, NULL);  // get only FG, PR_FG
      cvConvertScale( gcs->grabcut_mask, gcs->grabcut_mask, 255.0);
      cvAnd( gcs->grabcut_mask,  gcs->pImgGRAY,  gcs->pImgGRAY, NULL);
      cvCvtColor( gcs->pImgGRAY, gcs->pImgRGB, CV_GRAY2BGR );

      cvRectangle(gcs->pImgRGB, cvPoint(gcs->bbox_now.x, gcs->bbox_now.y), 
                  cvPoint(gcs->bbox_now.x + gcs->bbox_now.width, gcs->bbox_now.y+gcs->bbox_now.height),
                  cvScalar(127,0.0), 1, 8, 0 );
     break;
    case 70:// bboxes
      cvZero( gcs->pImgGRAY );
      cvMul( gcs->grabcut_mask,  gcs->grabcut_mask,  gcs->pImgGRAY, 40.0 );
      cvCvtColor( gcs->pImgGRAY, gcs->pImgRGB, CV_GRAY2BGR );
      break;
    case 71:// bboxes applied on the original image
      cvAndS(gcs->grabcut_mask, cvScalar(1), gcs->grabcut_mask, NULL);  // get only FG, PR_FG
      cvMul( gcs->grabcut_mask,  gcs->pImgGRAY,  gcs->pImgGRAY, 1.0 );
      cvCvtColor( gcs->pImgGRAY, gcs->pImgRGB, CV_GRAY2BGR );
      break;
    case 72: // input alpha channel mapped to output
      cvCvtColor( gcs->pImgChX, gcs->pImgRGB, CV_GRAY2BGR );
      break;
#ifdef KMEANS
    case 80:// k-means output
      cvCopy(gcs->pImgRGB_kmeans, gcs->pImgRGB, NULL);
      break;
    case 81:// k-means output filtered with bbox/ghost mask
      cvSplit(gcs->pImgRGB_kmeans, gcs->pImgCh1, gcs->pImgCh2, gcs->pImgCh3, NULL        );
      cvAndS(gcs->grabcut_mask, cvScalar(1), gcs->grabcut_mask, NULL);  // get FG and PR_FG
      cvConvertScale( gcs->grabcut_mask, gcs->grabcut_mask, 255.0);     // scale any to 255.

      cvAnd( gcs->grabcut_mask,  gcs->pImgCh1,  gcs->pImgCh1, NULL );
      cvAnd( gcs->grabcut_mask,  gcs->pImgCh2,  gcs->pImgCh2, NULL );
      cvAnd( gcs->grabcut_mask,  gcs->pImgCh3,  gcs->pImgCh3, NULL );

      cvMerge(              gcs->pImgCh1, gcs->pImgCh2, gcs->pImgCh3, NULL, gcs->pImgRGB);
      break;
#endif //KMEANS
    default:
      break;
    }
  }

  //////////////////////////////////////////////////////////////////////////////
  // in any case, copy the fg/bg mask into the alpha channel of the output image
  cvSplit(gcs->pImgRGB, gcs->pImgCh1, gcs->pImgCh2, gcs->pImgCh3, NULL        );
  cvAndS(gcs->grabcut_mask, cvScalar(1), gcs->grabcut_mask, NULL);  // get only FG and possible FG
  cvConvertScale( gcs->grabcut_mask, gcs->grabcut_mask, 255.0);
  gcs->pImgChA->imageData = (char*)gcs->grabcut_mask->data.ptr;

  cvMerge(              gcs->pImgCh1, gcs->pImgCh2, gcs->pImgCh3, gcs->pImgChA, gcs->pImageRGBA);

  gcs->numframes++;

  GST_GCS_UNLOCK (gcs);  
  
  return GST_FLOW_OK;
}
Example #6
// threshold trackbar callback
void on_trackbar( int dummy )
{
    static const uchar colors[][3] = 
    {
        {0,0,0},
        {255,0,0},
        {255,128,0},
        {255,255,0},
        {0,255,0},
        {0,128,255},
        {0,255,255},
        {0,0,255},
        {255,0,255}
    };
    
    int msize = mask_size;
    int _dist_type = build_voronoi ? CV_DIST_L2 : dist_type;

    cvThreshold( gray, edge, (float)edge_thresh, (float)edge_thresh, CV_THRESH_BINARY );

    if( build_voronoi )
        msize = CV_DIST_MASK_5;

    if( _dist_type == CV_DIST_L1 )
    {
        cvDistTransform( edge, edge, _dist_type, msize, NULL, NULL );
        cvConvert( edge, dist );
    }
    else
        cvDistTransform( edge, dist, _dist_type, msize, NULL, build_voronoi ? labels : NULL );

    if( !build_voronoi )
    {
        // begin "painting" the distance transform result
        cvConvertScale( dist, dist, 5000.0, 0 );
        cvPow( dist, dist, 0.5 );
    
        cvConvertScale( dist, dist32s, 1.0, 0.5 );
        cvAndS( dist32s, cvScalarAll(255), dist32s, 0 );
        cvConvertScale( dist32s, dist8u1, 1, 0 );
        cvConvertScale( dist32s, dist32s, -1, 0 );
        cvAddS( dist32s, cvScalarAll(255), dist32s, 0 );
        cvConvertScale( dist32s, dist8u2, 1, 0 );
        cvMerge( dist8u1, dist8u2, dist8u2, 0, dist8u );
        // end "painting" the distance transform result
    }
    else
    {
        int i, j;
        for( i = 0; i < labels->height; i++ )
        {
            int* ll = (int*)(labels->imageData + i*labels->widthStep);
            float* dd = (float*)(dist->imageData + i*dist->widthStep);
            uchar* d = (uchar*)(dist8u->imageData + i*dist8u->widthStep);
            for( j = 0; j < labels->width; j++ )
            {
                int idx = ll[j] == 0 || dd[j] == 0 ? 0 : (ll[j]-1)%8 + 1;
                int b = cvRound(colors[idx][0]);
                int g = cvRound(colors[idx][1]);
                int r = cvRound(colors[idx][2]);
                d[j*3] = (uchar)b;
                d[j*3+1] = (uchar)g;
                d[j*3+2] = (uchar)r;
            }
        }
    }
    
    cvShowImage( wndname, dist8u );
}
Example #7
void cv_AndS(const CvArr* src, CvScalar* value, CvArr* dst, const CvArr* mask) {
  cvAndS(src, *value, dst, mask);
}
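A minimal call sketch for this wrapper (src_img and dst_img are assumed images of the same size and type); passing the CvScalar by pointer is a pattern typically used by language bindings that cannot pass structs by value:

CvScalar v = cvScalarAll( 0xF0 );        // keep only the high 4 bits of every channel
cv_AndS( src_img, &v, dst_img, NULL );   // dst = src & 0xF0, no mask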
Example #8
CV_IMPL double
cvThreshold( const void* srcarr, void* dstarr, double thresh, double maxval, int type )
{
    CvHistogram* hist = 0;
    
    CV_FUNCNAME( "cvThreshold" );

    __BEGIN__;

    CvSize roi;
    int src_step, dst_step;
    CvMat src_stub, *src = (CvMat*)srcarr;
    CvMat dst_stub, *dst = (CvMat*)dstarr;
    CvMat src0, dst0;
    int coi1 = 0, coi2 = 0;
    int ithresh, imaxval, cn;
    bool use_otsu;

    CV_CALL( src = cvGetMat( src, &src_stub, &coi1 ));
    CV_CALL( dst = cvGetMat( dst, &dst_stub, &coi2 ));

    if( coi1 + coi2 )
        CV_ERROR( CV_BadCOI, "COI is not supported by the function" );

    if( !CV_ARE_CNS_EQ( src, dst ) )
        CV_ERROR( CV_StsUnmatchedFormats, "Both arrays must have equal number of channels" );

    cn = CV_MAT_CN(src->type);
    if( cn > 1 )
    {
        src = cvReshape( src, &src0, 1 );
        dst = cvReshape( dst, &dst0, 1 );
    }

    use_otsu = (type & ~CV_THRESH_MASK) == CV_THRESH_OTSU;
    type &= CV_THRESH_MASK;

    if( use_otsu )
    {
        float _ranges[] = { 0, 256 };
        float* ranges = _ranges;
        int hist_size = 256;
        void* srcarr0 = src;

        if( CV_MAT_TYPE(src->type) != CV_8UC1 )
            CV_ERROR( CV_StsNotImplemented, "Otsu method can only be used with 8uC1 images" );

        CV_CALL( hist = cvCreateHist( 1, &hist_size, CV_HIST_ARRAY, &ranges ));
        cvCalcArrHist( &srcarr0, hist );
        thresh = cvFloor(icvGetThreshVal_Otsu( hist ));
    }

    if( !CV_ARE_DEPTHS_EQ( src, dst ) )
    {
        if( CV_MAT_TYPE(dst->type) != CV_8UC1 )
            CV_ERROR( CV_StsUnsupportedFormat, "In case of different types destination should be 8uC1" );

        if( type != CV_THRESH_BINARY && type != CV_THRESH_BINARY_INV )
            CV_ERROR( CV_StsBadArg,
            "In case of different types only CV_THRESH_BINARY "
            "and CV_THRESH_BINARY_INV thresholding types are supported" );

        if( maxval < 0 )
        {
            CV_CALL( cvSetZero( dst ));
        }
        else
        {
            CV_CALL( cvCmpS( src, thresh, dst, type == CV_THRESH_BINARY ? CV_CMP_GT : CV_CMP_LE ));
            if( maxval < 255 )
                CV_CALL( cvAndS( dst, cvScalarAll( maxval ), dst ));
        }
        EXIT;
    }

    if( !CV_ARE_SIZES_EQ( src, dst ) )
        CV_ERROR( CV_StsUnmatchedSizes, "" );

    roi = cvGetMatSize( src );
    if( CV_IS_MAT_CONT( src->type & dst->type ))
    {
        roi.width *= roi.height;
        roi.height = 1;
        src_step = dst_step = CV_STUB_STEP;
    }
    else
    {
        src_step = src->step;
        dst_step = dst->step;
    }

    switch( CV_MAT_DEPTH(src->type) )
    {
    case CV_8U:
        
        ithresh = cvFloor(thresh);
        imaxval = cvRound(maxval);
        if( type == CV_THRESH_TRUNC )
            imaxval = ithresh;
        imaxval = CV_CAST_8U(imaxval);

        if( ithresh < 0 || ithresh >= 255 )
        {
            if( type == CV_THRESH_BINARY || type == CV_THRESH_BINARY_INV ||
                ((type == CV_THRESH_TRUNC || type == CV_THRESH_TOZERO_INV) && ithresh < 0) ||
                (type == CV_THRESH_TOZERO && ithresh >= 255) )
            {
                int v = type == CV_THRESH_BINARY ? (ithresh >= 255 ? 0 : imaxval) :
                        type == CV_THRESH_BINARY_INV ? (ithresh >= 255 ? imaxval : 0) :
                        type == CV_THRESH_TRUNC ? imaxval : 0;

                cvSet( dst, cvScalarAll(v) );
                EXIT;
            }
            else
            {
                cvCopy( src, dst );
                EXIT;
            }
        }

        if( type == CV_THRESH_BINARY || type == CV_THRESH_BINARY_INV )
        {
            if( icvCompareC_8u_C1R_cv_p && icvAndC_8u_C1R_p )
            {
                IPPI_CALL( icvCompareC_8u_C1R_cv_p( src->data.ptr, src_step,
                    (uchar)ithresh, dst->data.ptr, dst_step, roi,
                    type == CV_THRESH_BINARY ? cvCmpGreater : cvCmpLessEq ));

                if( imaxval < 255 )
                    IPPI_CALL( icvAndC_8u_C1R_p( dst->data.ptr, dst_step,
                    (uchar)imaxval, dst->data.ptr, dst_step, roi ));
                EXIT;
            }
        }
        else if( type == CV_THRESH_TRUNC || type == CV_THRESH_TOZERO_INV )
        {
            if( icvThreshold_GTVal_8u_C1R_p )
            {
                IPPI_CALL( icvThreshold_GTVal_8u_C1R_p( src->data.ptr, src_step,
                    dst->data.ptr, dst_step, roi, (uchar)ithresh,
                    (uchar)(type == CV_THRESH_TRUNC ? ithresh : 0) ));
                EXIT;
            }
        }
        else
        {
            assert( type == CV_THRESH_TOZERO );
            if( icvThreshold_LTVal_8u_C1R_p )
            {
                ithresh = cvFloor(thresh+1.);
                ithresh = CV_CAST_8U(ithresh);
                IPPI_CALL( icvThreshold_LTVal_8u_C1R_p( src->data.ptr, src_step,
                    dst->data.ptr, dst_step, roi, (uchar)ithresh, 0 ));
                EXIT;
            }
        }

        icvThresh_8u_C1R( src->data.ptr, src_step,
                          dst->data.ptr, dst_step, roi,
                          (uchar)ithresh, (uchar)imaxval, type );
        break;
    case CV_32F:

        if( type == CV_THRESH_TRUNC || type == CV_THRESH_TOZERO_INV )
        {
            if( icvThreshold_GTVal_32f_C1R_p )
            {
                IPPI_CALL( icvThreshold_GTVal_32f_C1R_p( src->data.fl, src_step,
                    dst->data.fl, dst_step, roi, (float)thresh,
                    type == CV_THRESH_TRUNC ? (float)thresh : 0 ));
                EXIT;
            }
        }
        else if( type == CV_THRESH_TOZERO )
        {
            if( icvThreshold_LTVal_32f_C1R_p )
            {
                IPPI_CALL( icvThreshold_LTVal_32f_C1R_p( src->data.fl, src_step,
                    dst->data.fl, dst_step, roi, (float)(thresh*(1 + FLT_EPSILON)), 0 ));
                EXIT;
            }
        }

        icvThresh_32f_C1R( src->data.fl, src_step, dst->data.fl, dst_step, roi,
                           (float)thresh, (float)maxval, type );
        break;
    default:
        CV_ERROR( CV_BadDepth, cvUnsupportedFormat );
    }

    __END__;

    if( hist )
        cvReleaseHist( &hist );

    return thresh;
}
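For reference, a minimal sketch of how cvThreshold is typically called from user code (the image variables and the file name are assumptions):

IplImage* gray = cvLoadImage( "input.png", CV_LOAD_IMAGE_GRAYSCALE );
IplImage* bin  = cvCreateImage( cvGetSize(gray), IPL_DEPTH_8U, 1 );

// fixed threshold: pixels above 128 become 255, the rest become 0
cvThreshold( gray, bin, 128, 255, CV_THRESH_BINARY );

// or let Otsu's method pick the threshold on an 8-bit single-channel image;
// the value actually used is returned
double otsu = cvThreshold( gray, bin, 0, 255, CV_THRESH_BINARY | CV_THRESH_OTSU );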