Example #1
File: lbt.cpp Project: ppdg123/LBtool
IplImage * showDegree(IplImage * img)
{
	int bar_height = 10;
	int height = img->height;
	int width = img->width;
	int bar_width = 100 > width ? 100 : width;   // make the output at least 100 px wide
	IplImage * res;
	res = cvCreateImage(cvSize(bar_width,bar_height+height),IPL_DEPTH_8U,3);
	int i,j;
	for(i=0;i<res->height;i++)
	{
	   for(j=0;j<res->width;j++)
	   {
		   CV_IMAGE_ELEM(res,uchar,i,j*3) = 255;
		   CV_IMAGE_ELEM(res,uchar,i,j*3+1) = 255;
		   CV_IMAGE_ELEM(res,uchar,i,j*3+2) = 255;
	   }
	}
	cvSetImageROI(res,cvRect((bar_width-width)/2,0,width,height));
	cvCopy(img,res);
	cvResetImageROI(res);
	cvReleaseImage(&img);
	int rectwidth = bar_width*degree/9;
	cvRectangle(res,cvPoint(0,height+1),cvPoint(rectwidth,height+bar_height),cvScalar(255,0,0),-1);
	//cvNamedWindow("ppf",CV_WINDOW_NORMAL);
	//cvShowImage("ppf",res);
	//cvWaitKey();
	//while(1);
	return res;
}
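A minimal driver sketch for the function above (the input path is illustrative, and `degree` is a file-scope variable of lbt.cpp that is assumed to be set elsewhere):

// Hypothetical caller; note that showDegree releases the image passed to it.
IplImage* frame = cvLoadImage("frame.png", CV_LOAD_IMAGE_COLOR);  // example path
if (frame)
{
	IplImage* withBar = showDegree(frame);   // frame is no longer valid after this call
	cvNamedWindow("degree", CV_WINDOW_NORMAL);
	cvShowImage("degree", withBar);
	cvWaitKey(0);
	cvReleaseImage(&withBar);
}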
Example #2
	void AddOneMask(IplImage *bdmask, int r, int c, vector<CvPoint> &border) {
		total++;

		if (CV_IMAGE_ELEM(bdmask, BYTE, r, c) == 0)
			return;

		border.push_back(cvPoint(c, r));
		CV_IMAGE_ELEM(bdmask, BYTE, r, c) = 0;

		while (true) {
			int d;
			for (d = 0; d < 8; d++)
				if (0 <= r + dirs[d][0] && r + dirs[d][0] < bdmask->height &&
					0 <= c + dirs[d][1] && c + dirs[d][1] < bdmask->width)
					if (CV_IMAGE_ELEM(bdmask, BYTE, r + dirs[d][0], c + dirs[d][1]) == 255) {
						break;
					}
			if (d == 8)
				break;
			r += dirs[d][0];
			c += dirs[d][1];
			border.push_back(cvPoint(c, r));
			CV_IMAGE_ELEM(bdmask, BYTE, r, c) = 0;
		}
		border.push_back(cvPoint(-1, -1));
	}
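AddOneMask relies on a `dirs` table that is not part of the snippet; a plausible 8-neighbourhood definition (an assumption for illustration, not the project's verbatim code) is:

// {row offset, column offset} pairs for the 8-connected neighbourhood used by AddOneMask.
static const int dirs[8][2] = {
	{-1, -1}, {-1, 0}, {-1, 1},
	{ 0, -1},          { 0, 1},
	{ 1, -1}, { 1, 0}, { 1, 1}
};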
Example #3
	void PM(IplImage *m, char *filename) {
		if (filename) {
			FILE *f = fopen(filename, "w");
			CvSize size = cvGetSize(m);
			char buf[50];
			for (int i = 0; i < size.height; i++) {
				fprintf(f, "{");
				for (int j = 0; j < size.width; j++) {
					sprintf(buf, "%f", (float)CV_IMAGE_ELEM(m, BYTE, i, j));  // cast: %f expects a floating-point argument
					fprintf(f, "%15s, ", buf);
				}
				fprintf(f, "},");
				fprintf(f, "\n");
			}
			fclose(f);
		}
		else {
			CvSize size = cvGetSize(m);
			for (int r = 0; r < size.height; r++) {
				for (int c = 0; c < size.width; c++)
					cout << (float)CV_IMAGE_ELEM(m, BYTE, r, c) << ' ';
				cout << endl;
			}
		}
	}
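A short usage sketch: passing a file name writes the matrix as C-style initializer rows, passing NULL prints it to stdout (the dump file name is illustrative):

IplImage* m = cvCreateImage(cvSize(4, 4), IPL_DEPTH_8U, 1);
cvSet(m, cvScalar(7));            // fill with a constant just for the demo
PM(m, (char*)"m_dump.txt");       // write to a text file
PM(m, NULL);                      // or print to stdout
cvReleaseImage(&m);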
Example #4
	IplImage* SegmentManager::GetThreeDivision(IplImage* src, int featureTop,int featureBottom,int toleranceTop,int toleranceBottom) {

		if (src == NULL)
			return NULL;

		topValue = featureTop;
		bottomValue = featureBottom;
		topToleranceValue = toleranceTop;
		bottomToleranceValue = toleranceBottom;

		if (divisionImg == NULL) {
			divisionImg = cvCreateImage(cvGetSize(src), IPL_DEPTH_8U, 1);
			activeImgs.push_back(divisionImg);
		}

		cvCopy(src, divisionImg, NULL);
		for (int i = 0; i < divisionImg->height; i++)
			for (int j = 0; j < divisionImg->width; j++) {
				uchar bright = CV_IMAGE_ELEM(src, uchar, i, j);
				int topDelta = abs(bright - featureTop);
				int bottomDelta = abs(bright - featureBottom);
				if (topDelta <= toleranceTop && bottomDelta <= toleranceBottom){
					CV_IMAGE_ELEM(divisionImg, uchar, i, j) = topDelta <= bottomDelta ? LABEL_TOP: LABEL_BOTTOM;
				}
				else if (topDelta <= toleranceTop)
					CV_IMAGE_ELEM(divisionImg, uchar, i, j) = LABEL_TOP;
				else if (bottomDelta <= toleranceBottom)
					CV_IMAGE_ELEM(divisionImg, uchar, i, j) = LABEL_BOTTOM;
				else
					CV_IMAGE_ELEM(divisionImg, uchar, i, j) = LABEL_INTERMEDIATE;
			}

		return divisionImg;
	}
Example #5
	bool IsColor(IplImage *img, int x, int y, int cid) {
		if (CV_IMAGE_ELEM(img, BYTE, y, x * 3 + 0) == colors[cid][2] &&
			CV_IMAGE_ELEM(img, BYTE, y, x * 3 + 1) == colors[cid][1] &&
			CV_IMAGE_ELEM(img, BYTE, y, x * 3 + 2) == colors[cid][0])
			return true;
		else
			return false;	
	}
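The `colors` table is defined outside this snippet; the reversed indices ([2], [1], [0]) are there because IplImage pixel data is stored BGR. A hypothetical, RGB-ordered table for illustration:

// Assumed palette, indexed as {R, G, B}; IsColor compares it against BGR pixel data.
static const BYTE colors[][3] = {
	{ 255,   0,   0 },   // cid 0: red
	{   0, 255,   0 },   // cid 1: green
	{   0,   0, 255 },   // cid 2: blue
};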
Example #6
int color_column(IplImage *img,struct rect_char chars)
{
    for (int y = chars.y ; y < chars.y + chars.height; y++)
    {
        CV_IMAGE_ELEM(img,uchar,y,chars.x) = 128;
        CV_IMAGE_ELEM(img,uchar,y,chars.x+ chars.width) = 128;
    }

    return 0;
}
Example #7
void FacePredict::FaceSynthesis(AAM_Shape &shape, CvMat* texture, IplImage* newImage)
{
	double thisfacewidth = shape.GetWidth();
	shape.Scale(stdwidth / thisfacewidth);
	shape.Translate(-shape.MinX(), -shape.MinY());

	AAM_PAW paw;
	CvMat* points = cvCreateMat (1, __shape.nPoints(), CV_32FC2);
	CvMemStorage* storage = cvCreateMemStorage(0);
	paw.Train(shape, points, storage, __paw.GetTri(), false);  //the actual shape

	__AAMRefShape.Translate(-__AAMRefShape.MinX(), -__AAMRefShape.MinY());  //refShape, central point is at (0,0);translate the min to (0,0)
	double minV, maxV;
	cvMinMaxLoc(texture, &minV, &maxV);
	cvConvertScale(texture, texture, 1/(maxV-minV)*255, -minV*255/(maxV-minV));

	cvZero(newImage);

	int x1, x2, y1, y2, idx1 = 0, idx2 = 0;
	int tri_idx, v1, v2, v3;
	int minx, miny, maxx, maxy;
	minx = shape.MinX(); miny = shape.MinY();
	maxx = shape.MaxX(); maxy = shape.MaxY();
	for(int y = miny; y < maxy; y++)
	{
		y1 = y-miny;
		for(int x = minx; x < maxx; x++)
		{
			x1 = x-minx;
			idx1 = paw.Rect(y1, x1);
			if(idx1 >= 0)
			{
				tri_idx = paw.PixTri(idx1);
				v1 = paw.Tri(tri_idx, 0);
				v2 = paw.Tri(tri_idx, 1);
				v3 = paw.Tri(tri_idx, 2);
		
				x2 = paw.Alpha(idx1)*__AAMRefShape[v1].x + paw.Belta(idx1)*__AAMRefShape[v2].x +  
					paw.Gamma(idx1)*__AAMRefShape[v3].x;
				y2 = paw.Alpha(idx1)*__AAMRefShape[v1].y + paw.Belta(idx1)*__AAMRefShape[v2].y +  
					paw.Gamma(idx1)*__AAMRefShape[v3].y;
				
				idx2 = __paw.Rect(y2, x2);
				if(idx2 < 0) continue;

				CV_IMAGE_ELEM(newImage, byte, y, 3*x) = cvmGet(texture, 0, 3*idx2);
				CV_IMAGE_ELEM(newImage, byte, y, 3*x+1) = cvmGet(texture, 0, 3*idx2+1);
				CV_IMAGE_ELEM(newImage, byte, y, 3*x+2) = cvmGet(texture, 0, 3*idx2+2);
			}
		}
	}
	cvReleaseMat(&points);
	cvReleaseMemStorage(&storage);
}
Example #8
File: cvgabor.cpp Project: ToMadoRe/v4r
/**
 * @brief CvGabor::conv_img(IplImage *src, IplImage *dst, int Type)
 * @param src
 * @param dst
 * @param Type
 */
void CvGabor::conv_img(IplImage *src, IplImage *dst, int Type)   // function name: conv_img
{
// printf("CvGabor::conv_img 1\n");
  double ve; //, re,im;
  
  CvMat *mat = cvCreateMat(src->width, src->height, CV_32FC1);
  for (int i = 0; i < src->width; i++) {
    for (int j = 0; j < src->height; j++) {
      ve = CV_IMAGE_ELEM(src, uchar, j, i);   // CV_IMAGE_ELEM reads the pixel value at position (j, i)
      CV_MAT_ELEM(*mat, float, i, j) = (float)ve;  // convert to float
    }
  }
  
// printf("CvGabor::conv_img 2\n");
  CvMat *rmat = cvCreateMat(src->width, src->height, CV_32FC1);
  CvMat *imat = cvCreateMat(src->width, src->height, CV_32FC1);
  
  switch (Type)
  {
    case CV_GABOR_REAL:
      cvFilter2D( (CvMat*)mat, (CvMat*)mat, (CvMat*)Real, cvPoint( (Width-1)/2, (Width-1)/2));
      break;
    case CV_GABOR_IMAG:
      cvFilter2D( (CvMat*)mat, (CvMat*)mat, (CvMat*)Imag, cvPoint( (Width-1)/2, (Width-1)/2));
      break;
    case CV_GABOR_MAG:
      cvFilter2D( (CvMat*)mat, (CvMat*)rmat, (CvMat*)Real, cvPoint( (Width-1)/2, (Width-1)/2));
      cvFilter2D( (CvMat*)mat, (CvMat*)imat, (CvMat*)Imag, cvPoint( (Width-1)/2, (Width-1)/2));
      
      cvPow(rmat,rmat,2); 
      cvPow(imat,imat,2);
      cvAdd(imat,rmat,mat); 
      cvPow(mat,mat,0.5); 
      break;
    case CV_GABOR_PHASE:
      break;
  }
  
// printf("CvGabor::conv_img 3\n");
  if (dst->depth == IPL_DEPTH_8U)
  {
    cvNormalize((CvMat*)mat, (CvMat*)mat, 0, 255, CV_MINMAX);
    for (int i = 0; i < mat->rows; i++)
    {
      for (int j = 0; j < mat->cols; j++)
      {
        ve = CV_MAT_ELEM(*mat, float, i, j);
        CV_IMAGE_ELEM(dst, uchar, j, i) = (uchar)cvRound(ve);
      }
    }
  }

  cvReleaseMat(&mat);
  cvReleaseMat(&rmat);
  cvReleaseMat(&imat);
}
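A hedged usage sketch: it assumes a CvGabor instance has already been constructed and initialized elsewhere (constructor arguments are deliberately omitted), and the file names are made up:

// `gabor` is assumed to be a fully initialized CvGabor; src must be 8-bit grayscale.
IplImage* src = cvLoadImage("input.png", CV_LOAD_IMAGE_GRAYSCALE);  // example path
IplImage* dst = cvCreateImage(cvGetSize(src), IPL_DEPTH_8U, 1);
gabor.conv_img(src, dst, CV_GABOR_MAG);   // magnitude of the Gabor response
cvSaveImage("gabor_mag.png", dst);        // example output path
cvReleaseImage(&src);
cvReleaseImage(&dst);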
Example #9
//细化函数入口
int thin(char*input)
{
	//if(argc!=2)
	//{
	//	return 0;
	//}

	IplImage *pSrc = NULL,*pDst = NULL,*pTmp = NULL;

	// load the input as a grayscale image
	pSrc = cvLoadImage(input,CV_LOAD_IMAGE_GRAYSCALE);
	if(!pSrc)
	{
		return 0;
	}
	pTmp = cvCloneImage(pSrc);
	pDst = cvCreateImage(cvGetSize(pSrc),pSrc->depth,pSrc->nChannels);
	cvZero(pDst);
	cvThreshold(pSrc,pTmp,128,1,CV_THRESH_BINARY_INV);// binarize: convert the image into 0/1 values
	//cvSaveImage("c://Threshold.bmp",pTmp,0);
	cvThin(pTmp,pDst,8);// thin the image; increase the iterations argument for stronger thinning
	cvNamedWindow("src",1);
	cvNamedWindow("dst",1);
	cvShowImage("src",pSrc);
	// convert the 0/1 image back to 0/255 grayscale so it can be displayed
	int i = 0,j = 0;
	CvSize size = cvGetSize(pDst);
	for(i=0; i<size.height;  i++)
	{
		for(j=0; j<size.width; j++)
		{
			if(CV_IMAGE_ELEM(pDst,uchar,i,j)==1)
			{
				CV_IMAGE_ELEM(pDst,uchar,i,j) = 0;
			}
			else
			{
				CV_IMAGE_ELEM(pDst,uchar,i,j) = 255;
			}
		}
	}
	//cvSaveImage("c://thin.bmp",pDst);
	cvShowImage("dst",pDst);
	cvWaitKey(0);
	cvReleaseImage(&pSrc);
	cvReleaseImage(&pDst);
	cvReleaseImage(&pTmp);
	cvDestroyWindow("src");
	cvDestroyWindow("dst");
	return 0;
} 
Example #10
	IplImage* SegmentManager::GetThreeDivisionAuto(IplImage* src, int tstep) {
		for (int i = 0; i < src->height; i++)
			for (int j = 0; j < src->width; j++) {
				uchar bright = CV_IMAGE_ELEM(src, uchar, i, j);
				brightness[bright]++;
			}
		int hill[256];
		int peak1 = -1, peak2 = -1, peakVal1 = 0, peakVal2 = 0;
		for (int i = 0; i < MAX_COLOR ; i++) {
			hill[i] = 0;
			for (int j = i - tstep + 1; j <= i + tstep - 1; j++) {
				if (j >= 0 && j < MAX_COLOR)
					hill[i] += brightness[j];
			}
			if (hill[i] > peakVal1) {
				peakVal1 = hill[i];
				peak1 = i;
			}
		}
		for (int i = 0; i < MAX_COLOR;i++)
			if (hill[i]>peakVal2 && i != peak1) {
				peakVal2 = hill[i];
				peak2 = i;
			}

		if (peak1 < 0 || peak2 < 0)
			return NULL;
		if (peak2 < peak1) {
			int tmp = peak1;
			peak1 = peak2;
			peak2 = tmp;
		}

		IplImage* ret = cvCreateImage(cvGetSize(src), IPL_DEPTH_8U, 1);
		activeImgs.push_back(ret);

		cvCopy(src, ret, NULL);
		for (int i = 0; i < ret->height; i++)
			for (int j = 0; j < ret->width; j++) {
				uchar bright = CV_IMAGE_ELEM(src, uchar, i, j);
				if (bright < peak1)
					CV_IMAGE_ELEM(ret, uchar, i, j) = LABEL_BOTTOM;
				else if (bright > peak2)
					CV_IMAGE_ELEM(ret, uchar, i, j) = LABEL_TOP;
				else
					CV_IMAGE_ELEM(ret, uchar, i, j) = LABEL_INTERMEDIATE;
			}

		return ret;
	}
Example #11
int maxValue(IplImage* img, CvRect rect)
{
	uchar maxval = 0;
	for (int i = rect.y; i < rect.y + rect.height; ++i)
	{
		for (int j = rect.x; j < rect.x + rect.width; ++j)
		{
			if (CV_IMAGE_ELEM(img, uchar, i, j) > maxval)
			{
				maxval = CV_IMAGE_ELEM(img, uchar, i, j);
			}
		}
	}
	return maxval;
}
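Example #23 further down also calls a minValue counterpart that does not appear on this page; a sketch mirroring maxValue (an assumption about its shape, not the project's verbatim code):

int minValue(IplImage* img, CvRect rect)
{
	uchar minval = 255;
	for (int i = rect.y; i < rect.y + rect.height; ++i)
	{
		for (int j = rect.x; j < rect.x + rect.width; ++j)
		{
			if (CV_IMAGE_ELEM(img, uchar, i, j) < minval)
			{
				minval = CV_IMAGE_ELEM(img, uchar, i, j);
			}
		}
	}
	return minval;
}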
Example #12
File: lbt.cpp Project: ppdg123/LBtool
IplImage * showSp(IplImage * img)
{
	int i,j;
	for(i=0;i<img->height;i++)
	{
		for(j=0;j<img->width;j++)
		{
			if(sp_val[(int)cvmGet(spmat,i,j)-1]==cur_point_ptr)
			{
				CV_IMAGE_ELEM(img,uchar,i,j*3+1) = 150;
				CV_IMAGE_ELEM(img,uchar,i,j*3+2) = 250;
			}
		}
	}
	return img;
}
Example #13
File: touchable.cpp Project: cers/iwb
    void Touchable::detectTouch(IplImage* mask) {
        int x,y,c = 0;
        if (this->interaction > 1)
            this->interaction--;
        if (mask == NULL) {
            printf("!!!! OMG OMG mask is NULLL LLLLL\n");
            return;
        }
//        for (x=this->cameraUL.x; x<this->cameraBR.x; x++)
//            for (y=this->cameraUL.y; y<this->cameraBR.y; y++)
        for (x=this->cameraUL.x; x<this->cameraBR.x; x++)
            for (y=this->cameraBR.y; y<this->cameraBR.y+10; y++)   // scan only a 10-pixel strip just below the region
                if (CV_IMAGE_ELEM( mask, uchar, y, x)) {
                    c++;
                    if (c >= this->threshold) {
                        this->interaction += 2;
                    }
                    if (this->interaction > INTERACTION_TRIGGER) {
                        printf("DETECTED TOUCH!\n");
//                        cvSaveImage("touchedMask.jpg", mask);
                        if (this->action != NULL)
                            this->action();
                        this->interaction = 0;
                        return;
                    }

                }
    }
Example #14
void CEnsemble3dExt::SaveResult(const char* folder_name, int z)
{
	string path = folder_name;
	char dirc[100];
	int n = sprintf(dirc,"000%03d.png", z );
    int x,y;
	string dir = path + dirc;
	uchar pixel;

	//printf ("%s\n", dir.c_str());
	//cvSaveImage(dir.c_str(), outmask);

	for(x=0; x<512; x++){
        for(y=0;y<512; y++){


            Mat* slice = _temporal[z];
//            cv::namedWindow( "Display window", cv::WINDOW_AUTOSIZE );// Create a window for display.
//            cv::imshow( "Display window", *slice );                   // Show our image inside it.
//
//            cv::waitKey(0);
            pixel = CV_IMAGE_ELEM(outmask,uchar,y,x);

            slice->at<uchar>(cvPoint(x,y)) = pixel;

        }

	}
}
Example #15
void ImageIOSaltEffect(IplImage *src, IplImage **dst, double percent)
{
	int total = src->width * src->height;
	int range_noise = (double) total * percent;
	printf("Total=%d, range_noise=%d", total, range_noise);
	int i;
	srand(time(NULL));
	if(*dst)
	{
		cvReleaseImage(dst);   // drop any previous result
		*dst = NULL;
	}
	*dst = cvCloneImage(src);   // always start from a fresh copy of the source
	//dst = cvCreateImage(cvGetSize(src), IPL_DEPTH_8U, src->nChannels);
	for(i=0; i<range_noise; i++)
	{
		int ch;
		CvPoint pt = cvPoint(rand()%(*dst)->height, rand()%(*dst)->width);
		for(ch=0; ch<(*dst)->nChannels; ch++)
		{
			CV_IMAGE_ELEM(*dst, uchar, pt.x, pt.y*(*dst)->nChannels + ch) = rand()%256;
		}
	}
}
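Usage sketch, corrupting roughly 5% of the pixels (path and window name are illustrative):

IplImage* src = cvLoadImage("lena.png", CV_LOAD_IMAGE_COLOR);  // example path
IplImage* noisy = NULL;
ImageIOSaltEffect(src, &noisy, 0.05);
cvNamedWindow("salt", CV_WINDOW_AUTOSIZE);
cvShowImage("salt", noisy);
cvWaitKey(0);
cvReleaseImage(&src);
cvReleaseImage(&noisy);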
Example #16
void
Histogram::show_histogram()
{
	IplImage* histogram_img = cvCreateImage(cvSize(get_max_value()+1,m_height),8,1);
	cvZero(histogram_img);
	for(int i=0;i<m_height;i++)
	{
		int next_value = m_histo_values[i];
		int j=m_height-1;
		while (j > 0 && CV_IMAGE_ELEM(histogram_img,uchar, j, next_value)==255)  // stop at the top row to avoid running out of bounds
			--j;
		CV_IMAGE_ELEM(histogram_img,uchar, j, next_value) = 255;
	}
	cvNamedWindow("Histogram",0);
	cvResizeWindow("Histogram",5*get_max_value(),5*m_height);
	cvShowImage("Histogram", histogram_img );
	cvReleaseImage(&histogram_img);
}
Example #17
void modificarPixels(int grupo, int dir,IplImage *imagen, int valorNuevo,int **direction, int tamFila, int tamColumna ){
	
	int i;	
	for (i=0; i < dir; i++) {
		CV_IMAGE_ELEM(imagen,uchar,I_INDICE( direction[grupo][i],tamFila),
					  J_INDICE( direction[grupo][i],tamFila))=1;
	}
	
}
Example #18
void ofxCvWatershed::segment() {
    reset();
    int nContours = cvFindContours( iplMarkersTempImg, storage, &contours, sizeof(CvContour),
                                   CV_RETR_CCOMP, CV_CHAIN_APPROX_SIMPLE );
    
    int i, j, compCount = 0;
    
    cvZero( iplMarkers32sImg );
    for( ; contours != 0; contours = contours->h_next, compCount++ ) {
        cvDrawContours( 
                       iplMarkers32sImg, 
                       contours, 
                       cvScalarAll(compCount+1),
                       cvScalarAll(compCount+1), 
                       -1, -1, 8, 
                       cvPoint(0,0) 
                       );
    }
    
    CvRNG rng = cvRNG(-1);
    colors = cvCreateMat( 1, compCount, CV_8UC3 );
    for( i = 0; i < compCount; i++ ) {
        uchar* ptr = colors->data.ptr + i*3;
        // no colors for now.
        ptr[0] = (uchar)0;//(cvRandInt(&rng)%180 + 50);
        ptr[1] = (uchar)0;//(cvRandInt(&rng)%180 + 50);
        ptr[2] = (uchar)0;//(cvRandInt(&rng)%180 + 50);
    }
    
    cvWatershed( iplTargetImg, iplMarkers32sImg );
    
    // paint the watershed image
    for( i = 0; i < iplMarkers32sImg->height; i++ ) {
        for( j = 0; j < iplMarkers32sImg->width; j++ ) {
            int idx     =  CV_IMAGE_ELEM( iplMarkers32sImg, int, i, j );
            uchar* dst  = &CV_IMAGE_ELEM( iplTargetImg, uchar, i, j*3 );
            if( idx == -1 ) {
                dst[0] = dst[1] = dst[2] = (uchar)255;
            } else if( idx <= 0 || idx > compCount ){
                dst[0] = dst[1] = dst[2] = (uchar)0; // should not get here
            }else {
                uchar* ptr = colors->data.ptr + (idx-1)*3;
                dst[0] = ptr[0]; dst[1] = ptr[1]; dst[2] = ptr[2];
            }
        }
    }
    //cvAddWeighted( watershed, 0.5, colorImg.getCvImage(), 0.5, 0, watershed );
    watershedImg     = iplTargetImg;
    watershedGrayImg = watershedImg;
    watershedGrayImg.threshold(140);
    //watershedGrayImg.invert();
    
    printf("contorus %i", contourFinder.findContours( watershedGrayImg, 10, 
                               (watershedImg.width * watershedImg.height)/ 2.f,
                               20, true));
}
Example #19
//============================================================================
void AAM_CAM::DrawAppearance(IplImage* image, const AAM_Shape& Shape, CvMat* Texture)
{
	AAM_PAW paw;
	int x1, x2, y1, y2, idx1 = 0, idx2 = 0;
	int tri_idx, v1, v2, v3;
	int minx, miny, maxx, maxy;
	paw.Train(Shape, __Points, __Storage, __paw.GetTri(), false);
	AAM_Shape refShape = __paw.__referenceshape;
	double minV, maxV;
	cvMinMaxLoc(Texture, &minV, &maxV);
	cvConvertScale(Texture, Texture, 1/(maxV-minV)*255, -minV*255/(maxV-minV));

	minx = Shape.MinX(); miny = Shape.MinY();
	maxx = Shape.MaxX(); maxy = Shape.MaxY();
	for(int y = miny; y < maxy; y++)
	{
		y1 = y-miny;
		for(int x = minx; x < maxx; x++)
		{
			x1 = x-minx;
			idx1 = paw.Rect(y1, x1);
			if(idx1 >= 0)
			{
				tri_idx = paw.PixTri(idx1);
				v1 = paw.Tri(tri_idx, 0);
				v2 = paw.Tri(tri_idx, 1);
				v3 = paw.Tri(tri_idx, 2);
		
				x2 = paw.Alpha(idx1)*refShape[v1].x + paw.Belta(idx1)*refShape[v2].x +  
					paw.Gamma(idx1)*refShape[v3].x;
				y2 = paw.Alpha(idx1)*refShape[v1].y + paw.Belta(idx1)*refShape[v2].y +  
					paw.Gamma(idx1)*refShape[v3].y;
				
				idx2 = __paw.Rect(y2, x2);
				if(idx2 < 0) continue;

				CV_IMAGE_ELEM(image, byte, y, 3*x) = cvmGet(Texture, 0, 3*idx2);
				CV_IMAGE_ELEM(image, byte, y, 3*x+1) = cvmGet(Texture, 0, 3*idx2+1);
				CV_IMAGE_ELEM(image, byte, y, 3*x+2) = cvmGet(Texture, 0, 3*idx2+2);
			}
		}
	}
}
Example #20
IplImage* ForegroundSegmentSimple(IplImage* fpImage, int block_size, int minStd)
{
	int width = fpImage->width;
	int height = fpImage->height;
	int blockWidth = (int)ceil((double)width / block_size);   // integer division would drop the partial block at the edge
	int blockHeight = (int)ceil((double)height / block_size);
	IplImage* blockMaskImage = cvCreateImage(cvSize(blockWidth, blockHeight), IPL_DEPTH_8U, 1);
	cvSet(blockMaskImage, cvScalar(0));

	int minVar = minStd * minStd;

	for(int by = 0; by < blockHeight; by++)
	{
		for(int bx = 0; bx < blockWidth; bx++)
		{
			int mn = 0, num = 0, var = 0, value;
			for(int r = 0; r < block_size; r++)
			{
				for(int c = 0; c < block_size; c++)
				{
					int y = by*block_size+r;
					int x = bx*block_size+c;
					if(x<width && y<height)
					{
						value = CV_IMAGE_ELEM(fpImage, BYTE, y, x);
						mn += value;
						var += value*value;
						num++;
					}
				}
			}

			mn = mn/num;
			var = var/num - mn*mn;
			if(var>=minVar)
				CV_IMAGE_ELEM(blockMaskImage, BYTE, by, bx) = 1;
			else
				CV_IMAGE_ELEM(blockMaskImage, BYTE, by, bx) = 0;
		}
	}

	return blockMaskImage;
}
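Usage sketch: flag 16x16 blocks whose intensity standard deviation is at least 10 (path, block size and threshold are illustrative):

IplImage* gray = cvLoadImage("frame.png", CV_LOAD_IMAGE_GRAYSCALE);  // example path
IplImage* blockMask = ForegroundSegmentSimple(gray, 16, 10);  // 1 = textured block, 0 = flat block
// ... consume blockMask ...
cvReleaseImage(&blockMask);
cvReleaseImage(&gray);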
Example #21
int color_line (IplImage *img, int y)
{
    int x;
    for (x = 0 ; x < img->width; x++)
    {

        CV_IMAGE_ELEM(img,uchar,y,x)=128;
    }

    return 0;
}
Example #22
void MaskOF(IplImage* blockMaskImg, IplImage* blockDirImg)
{
	for(int by = 0; by < blockMaskImg->height; by++)
	{
		for(int bx = 0; bx < blockMaskImg->width; bx++)
		{
			if(CV_IMAGE_ELEM(blockMaskImg, BYTE, by, bx)==0)
				CV_IMAGE_ELEM(blockDirImg, char, by, bx) = 91;
		}
	}
}
Example #23
void maxminFilter(IplImage* src, IplImage* dst, int width, int height = 0, int mode = KCV_MAX)
{
	for (int j = 0; j < src->width; ++j)
	{
		for (int i = 0; i < src->height; ++i)
		{
			CvRect rect1 = cvRect(0, 0, src->width, src->height);
			CvRect rect2 = kcvGetRectFromCenterAndSize(j, i, width, height);
			CvRect rect = kcvRectIntersection(rect1, rect2);
			switch (mode)
			{
			case KCV_MAX:
				CV_IMAGE_ELEM(dst, uchar, i, j) = maxValue(src, rect);// note: only src is read, the result is written to dst
				break;
			case KCV_MIN:
				CV_IMAGE_ELEM(dst, uchar, i, j) = minValue(src, rect);
				break;
			case KCV_NMS_MAX:
				if (CV_IMAGE_ELEM(src, uchar, i, j) != maxValue(src, rect))
				{
					CV_IMAGE_ELEM(dst, uchar, i, j) = 0;
				}
				else
				{
					CV_IMAGE_ELEM(dst, uchar, i, j) = CV_IMAGE_ELEM(src, uchar, i, j);
				}
				break;
			case KCV_NMS_MIN:
				if (CV_IMAGE_ELEM(src, uchar, i, j) != minValue(src, rect))
				{
					CV_IMAGE_ELEM(dst, uchar, i, j) = 255;
				}
				else
				{
					CV_IMAGE_ELEM(dst, uchar, i, j) = CV_IMAGE_ELEM(src, uchar, i, j);
				}
				break;
			}
		}
	}
}
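A usage sketch for the filter above (KCV_MAX and the kcv* rectangle helpers come from the surrounding project; only the signature shown above is assumed):

IplImage* gray = cvLoadImage("input.png", CV_LOAD_IMAGE_GRAYSCALE);  // example path
IplImage* dilated = cvCreateImage(cvGetSize(gray), IPL_DEPTH_8U, 1);
maxminFilter(gray, dilated, 5, 5, KCV_MAX);   // 5x5 local maximum, i.e. a grayscale dilation
cvReleaseImage(&dilated);
cvReleaseImage(&gray);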
Example #24
	void GetBorder(IplImage *img, int cid, vector<CvPoint> &border) {
		IplImage *bdmask = cvCreateImage(cvGetSize(img), 8, 1);
		cvZero(bdmask);

		int fr;// fc;
		for (fr = 0; fr < img->height; fr++) {
			for (int fc = 0; fc < img->width; fc++) {
				if (IsBorder(img, fr, fc, cid))
					CV_IMAGE_ELEM(bdmask, BYTE, fr, fc) = 255;
			}
		}

		for (int i = 0; i < bdmask->width; i++) 
			if (CV_IMAGE_ELEM(bdmask, BYTE, 0, i) == 255) 
				AddOneMask(bdmask, 0, i, border);
		for (int i = 0; i < bdmask->width; i++) 
			if (CV_IMAGE_ELEM(bdmask, BYTE, bdmask->height - 1, i) == 255) 
				AddOneMask(bdmask, bdmask->height - 1, i, border);
		for (int i = 0; i < bdmask->height; i++) 
			if (CV_IMAGE_ELEM(bdmask, BYTE, i, 0) == 255) 
				AddOneMask(bdmask, i, 0, border);
		for (int i = 0; i < bdmask->height; i++) 
			if (CV_IMAGE_ELEM(bdmask, BYTE, i, bdmask->width - 1) == 255) 
				AddOneMask(bdmask, i, bdmask->width - 1, border);

		for (fr = 0; fr < img->height; fr++) {
			for (int fc = 0; fc < img->width; fc++) {
				if (CV_IMAGE_ELEM(bdmask, BYTE, fr, fc) == 255)
					AddOneMask(bdmask, fr, fc, border);				
			}
		}	
		cvReleaseImage(&bdmask);
	}
Example #25
	//Private
	void SegmentManager::OnMouseClickedOnGray(int event, int x, int y, int flags, void* param) {
		SegmentManager* segMgr = SegmentManager::Instance();
		IplImage* gray = segMgr->GrayImage();
		uchar bright = CV_IMAGE_ELEM(gray, uchar, y, x);
		if (event == CV_EVENT_LBUTTONDOWN) {
			IplImage* division = segMgr->GetThreeDivision(gray, bright, segMgr->BottomValue(), segMgr->TopTolerance(), segMgr->BottomTolerance());
			cvShowImage((char*)(segMgr->DIVISION_WIN), division);
		}
		if (event == CV_EVENT_RBUTTONDOWN) {
			IplImage* division = segMgr->GetThreeDivision(gray, segMgr->TopValue(), bright, segMgr->TopTolerance(), segMgr->BottomTolerance());
			cvShowImage((char*)(segMgr->DIVISION_WIN), division);
		}
	}
Example #26
int line_value (IplImage *img,int y)
{
    int x;

    for (x = 0 ; x < img->width; x++)
    {
        // if black pixel return 1
        if (CV_IMAGE_ELEM(img,uchar,y,x) <50)
        {
            return 1;
        }
    }
    return 0;
}
Example #27
// collect the coordinates of the non-zero pixels of an edge image into an array of points
int ConvertImage2XY(const IplImage* img_edge,point** p)
{
	int i,j,t = 0;
	int width = img_edge->width;
	int height = img_edge->height;
	int numpix = 0;	
	for(i=0;i<width;i++)
		for(j=0;j<height;j++)
		{
			if(CV_IMAGE_ELEM(img_edge,uchar,j,i) == 255)
				numpix ++;
		}
	(*p) = (point*)malloc(sizeof(point)*numpix);
	for(i=0;i<width;i++)
		for(j=0;j<height;j++)
		{
			if(CV_IMAGE_ELEM(img_edge,uchar,j,i) == 255)
			{
				(*p)[t].x = i;
				(*p)[t++].y = j;
			}
		}
	return numpix;
}
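Usage sketch: build the edge map with cvCanny and collect its pixel coordinates (thresholds and path are illustrative; `point` is the project's own struct with x and y members):

IplImage* gray  = cvLoadImage("shape.png", CV_LOAD_IMAGE_GRAYSCALE);  // example path
IplImage* edges = cvCreateImage(cvGetSize(gray), IPL_DEPTH_8U, 1);
cvCanny(gray, edges, 50, 150, 3);          // edge pixels come out as 255
point* p = NULL;
int numpix = ConvertImage2XY(edges, &p);
printf("%d edge pixels\n", numpix);
free(p);
cvReleaseImage(&edges);
cvReleaseImage(&gray);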
Example #28
long kmf(const IplImage* img)
{
	int i,j;
	int ig,jg;
	long mf=0;
	ig = knMe(img,1,0)/knMe(img,0,0);
	jg = knMe(img,0,1)/knMe(img,0,0);
	for(i=0;i<img->height;i++)
	{
		for(j=0;j<img->width;j++)
			mf += (pow((float)(i-ig),2)+pow((float)(j-jg),2))*CV_IMAGE_ELEM(img,uchar,i,j)/255;
		
	}
	return mf;
}
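kmf depends on a raw-moment helper knMe(img, p, q) that is not shown on this page; a sketch of what such a helper typically computes (an assumption, consistent with the centroid formulas ig = m10/m00 and jg = m01/m00 used above):

// Raw image moment m_pq over rows (i) and columns (j), with intensities scaled to 0/1.
long knMe(const IplImage* img, int p, int q)
{
	long m = 0;
	for(int i=0;i<img->height;i++)
		for(int j=0;j<img->width;j++)
			m += (long)(pow((float)i,p)*pow((float)j,q))*CV_IMAGE_ELEM(img,uchar,i,j)/255;
	return m;
}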
Example #29
int column_value(int y1, int y2,int x, IplImage *img)
{
    int i;
    for(i=y1 +1 ; i<y2;i++)
    {
        // if black pixel return 1
        if (CV_IMAGE_ELEM(img,uchar,i,x) < 50)
        {
            return 1;

        }

    }
    return 0;
}
Example #30
template<typename T>
static mxArray* iplimage_to_mxarray(IplImage* p_iplimage)
{

  //info
  int height_  =  p_iplimage->height;
  int width_   =  p_iplimage->width;
  int channels_ =  p_iplimage->nChannels;
  int ipl_depth = p_iplimage->depth;

 //
	mwSize dims[] = {height_, width_, channels_};

    ///TODO: mmmh  
	int ndimensions =( (channels_ > 2 ) ? (3) : (2) );
    //
	mxArray* mxresult = mxCreateNumericArray(ndimensions, dims, matlab::traits<T>::tag, mxREAL);

  //pointer to newly created mxArray
	typename matlab::traits<T>::ptr mx_start_ptr = 
      static_cast<typename matlab::traits<T>::ptr>(mxGetData(mxresult));

  //offsets
  int col_offset_ = 0;
  const int channel_offset_ = height_*width_;
  size_t bgr_channel_index_ = 0;

	//Loop...
  //cols
	for(int col_ = 0 ; col_ < width_; col_++)
  {
		col_offset_ = col_*height_;
    //rows
		for(int row_ = 0 ; row_ < height_; row_++)
			{
        //channels
        for(int channel_index_ = 0; channel_index_ < channels_; channel_index_++)
        {
          ///IplImage is BGR
          bgr_channel_index_ = (channels_ - channel_index_ - 1);

          mx_start_ptr[(col_offset_)+ row_+(channel_index_*channel_offset_)] =
            CV_IMAGE_ELEM(p_iplimage,T, row_, (col_*channels_) + bgr_channel_index_);
        }
      }

  }
	return mxresult;
}