Пример #1
0
int main()
{
    /* Load the source image; cvLoadImage returns NULL on failure. */
    IplImage* img=cvLoadImage("3.7.jpg");

    if(!img)
    {
        printf("Cannot open the image\n");
        return -1;
    }

    /* One single-channel plane per colour component.  OpenCV loads
     * colour images in B,G,R channel order, so cvSplit's first output
     * is the blue plane -- pass the planes accordingly below. */
    IplImage* rImg=cvCreateImage(cvGetSize(img),img->depth,1);
    IplImage* gImg=cvCreateImage(cvGetSize(img),img->depth,1);
    IplImage* bImg=cvCreateImage(cvGetSize(img),img->depth,1);

    IplImage* clone1=cvCreateImage(cvGetSize(img),img->depth,1);
    IplImage* clone2=cvCreateImage(cvGetSize(img),img->depth,1);

    double green_maxPixel=0;
    double green_minPixel=0;
    double thresh;

    cvSplit(img,bImg,gImg,rImg,0);

    cvNamedWindow("GreenImage0");
    cvShowImage("GreenImage0",gImg);

    cvCopy(gImg,clone1);
    cvCopy(gImg,clone2);

    /* cvMinMaxLoc takes the MIN pointer first, then the MAX pointer;
     * the original call had them swapped, which made the difference
     * below negative and its (unsigned char) cast undefined. */
    cvMinMaxLoc(gImg,&green_minPixel,&green_maxPixel);

    /* Mid-point of the green plane's dynamic range. */
    thresh=(green_maxPixel-green_minPixel)/2.0;
    cvSet(clone1,cvScalar(thresh));

    cvZero(clone2);
    cvCmp(gImg,clone1,clone2,CV_CMP_GE); /* mask: pixels >= threshold */

    /* Saturating subtraction applied only where the mask is set. */
    cvSubS(gImg,cvScalar(thresh/2),gImg,clone2);

    cvNamedWindow("GreenImage1");
    cvShowImage("GreenImage1",gImg);
    cvWaitKey(0);
    cvReleaseImage(&img);
    cvReleaseImage(&rImg);
    cvReleaseImage(&gImg);
    cvReleaseImage(&bImg);
    cvReleaseImage(&clone1); /* previously leaked */
    cvReleaseImage(&clone2); /* previously leaked */
    cvDestroyWindow("GreenImage0");
    cvDestroyWindow("GreenImage1");

    return 0;
}
Пример #2
0
static void node_composit_exec_cvSub(void *data, bNode *node, bNodeStack **in, bNodeStack **out) {
    /* Compositor node: dst = in[0] - in[1], where in[1] is either a
     * second image or a 3-component vector used as a scalar, and
     * in[2] is an optional mask restricting the operation. */
    CvArr* dst;
    CvArr* src1;
    CvArr* src2 = NULL;
    CvArr* mask = NULL;
    CompBuf *dst_buf;

    if (out[0]->hasoutput == 0) return;
    if (!(in[0]->data)) return;

    src1 = BOCV_IplImage_attach(in[0]->data);

    if (in[2]->data) {
        mask = BOCV_Mask_attach(in[2]->data);
        if (!BOCV_checkMask(src1, mask)) {
            node->error = 1;
            BOCV_IplImage_detach(src1); /* was leaked on this error path */
            return;
        }
    }

    /* Validate the second image BEFORE allocating the output buffer so
     * the error path has nothing extra to clean up (the original leaked
     * dst_buf, src1 and src2 when these checks failed). */
    if (in[1]->data) {
        src2 = BOCV_IplImage_attach(in[1]->data);
        if (!BOCV_checkAreSameType(src1, src2) ||
            !BOCV_checkSameNChannels(src1, src2)) {
            node->error = 1;
            BOCV_IplImage_detach(src2);
            BOCV_IplImage_detach(src1);
            return;
        }
    }

    /* Output buffer duplicates the first input's geometry. */
    dst_buf = dupalloc_compbuf(in[0]->data);
    dst = BOCV_IplImage_attach(dst_buf);

    if (src2) {
        cvSub(src1, src2, dst, mask);
        BOCV_IplImage_detach(src2);
    } else {
        /* No second image connected: subtract the vector socket value. */
        CvScalar s;
        s.val[0] = (in[1]->vec[0]);
        s.val[1] = (in[1]->vec[1]);
        s.val[2] = (in[1]->vec[2]);
        s.val[3] = 0;
        cvSubS(src1, s, dst, mask);
    }

    out[0]->data = dst_buf;
    BOCV_IplImage_detach(src1);
    BOCV_IplImage_detach(dst);
    /* NOTE(review): mask is never detached (same as the original);
     * confirm BOCV_Mask_attach requires no matching detach. */
}
Пример #3
0
//形态学H极大值
void lhMorpHMax(const IplImage* src, IplImage* dst, unsigned char h, IplConvKernel* se = NULL)
{
	assert(src != NULL  &&  dst != NULL && src != dst );

	//p150
	IplImage*  temp = cvCreateImage(cvGetSize(src), 8, 1);

	cvSubS(src, cvScalar(h), temp);
	
	lhMorpRDilate(temp, src, dst, se);

	cvReleaseImage(&temp);

}
Пример #4
0
static void work(int)
{
	int r, b, g, subR, subB, subG;
	r = cvGetTrackbarPos("增加紅", ctrlPanel1);
	b = cvGetTrackbarPos("增加藍", ctrlPanel1);
	g = cvGetTrackbarPos("增加綠", ctrlPanel1);
	subR = cvGetTrackbarPos("減少紅", ctrlPanel2);
	subB = cvGetTrackbarPos("減少藍", ctrlPanel2);
	subG = cvGetTrackbarPos("減少綠", ctrlPanel2);

	des = cvCloneImage(img);
	cvAddS(img, CV_RGB(r, g, b), des, 0);
	cvSubS(des, CV_RGB(subR, subG, subB), des, 0);
	cvShowImage(windowName, des);
}
Пример #5
0
//形态学区域极大值
void lhMorpRMax(const IplImage* src, IplImage* dst, IplConvKernel* se = NULL)
{
	assert(src != NULL  &&  dst != NULL && src != dst );

	//p149 (6.13)
	IplImage*  temp = cvCreateImage(cvGetSize(src), 8, 1);

	cvSubS(src, cvScalar(1), temp);
	
	lhMorpRDilate(temp, src, dst, se);

	cvSub(src, dst, dst);

	cvReleaseImage(&temp);

}
Пример #6
0
int main(){
    
    //initialize
    IplImage* src_image = 0;
    IplImage* bright_image = 0;
    IplImage* dark_image = 0;
    
    //load image (flag 0 -> force single-channel grayscale)
    src_image = cvLoadImage("/Users/ihong-gyu/MyProject/OpenCVTest/Lena.jpeg",0);
    if(!src_image){
        //bail out before cvGetSize() dereferences a NULL image
        printf("Cannot open the image\n");
        return -1;
    }
    
    //create a window
    cvNamedWindow("Original Image", CV_WINDOW_AUTOSIZE);
    cvNamedWindow("Bright Image", CV_WINDOW_AUTOSIZE);
    cvNamedWindow("Dark Image", CV_WINDOW_AUTOSIZE);
    
    //create destination images matching the source geometry
    bright_image = cvCreateImage(cvGetSize(src_image),IPL_DEPTH_8U, 1);
    dark_image = cvCreateImage(cvGetSize(src_image),IPL_DEPTH_8U, 1);
    
    //brighten / darken every pixel by 60 (saturating add/subtract)
    cvAddS(src_image, CV_RGB(60,60,60), bright_image,NULL);
    cvSubS(src_image, CV_RGB(60,60,60), dark_image,NULL);
    
    //show the images
    cvShowImage("Original Image", src_image);
    cvShowImage("Bright Image", bright_image);
    cvShowImage("Dark Image", dark_image);
    
    //wait for a key
    cvWaitKey(0);
    
    //release the images and tear down the windows (previously leaked)
    cvReleaseImage(&src_image);
    cvReleaseImage(&bright_image);
    cvReleaseImage(&dark_image);
    cvDestroyAllWindows();
    
    return 0;
}
Пример #7
0
void BlobLight::draw(){
	
	// Pull the grayscale difference image from the first blob tracker,
	// scale it into the working image, and apply camera warp 0.
	ofPushStyle();
	ofxCvGrayscaleImage Largeimg = blob(0)->grayDiff;
	img.scaleIntoMe(Largeimg);
	getPlugin<CameraCalibration*>(controller)->applyWarp(0);

	// Blend the new frame into the history buffer (alpha/beta weights),
	// then fade the history toward black by addblack*100 per frame.
	cvAddWeighted(history.getCvImage(),alpha, img.getCvImage(),beta,0.0, history.getCvImage());
	cvSubS(history.getCvImage(), cvScalar(addblack*100) , history.getCvImage());
	history.flagImageChanged();	

	// Optional blur / threshold / gaussian-blur post-processing chain.
	if(blur > 0)
		img.blur(blur);
	if(threshold > 0){
		img.threshold(threshold, false);
	}
	if(blur2 > 0){
		img.blurGaussian(blur2);
	}
	

	historyTmp = history;
	historyTmp.blurGaussian(blur2);
	
	// Additive blending: draw the faded history, then the live blob image.
	ofEnableAlphaBlending();
	glBlendFunc (GL_SRC_COLOR, GL_ONE);	

	ofSetColor(historyalpha*r, historyalpha*g, historyalpha*b, historyalpha*255);
	historyTmp.draw(0,0,1,1);
	
	ofSetColor( blobalpha*r2,  blobalpha*g2,  blobalpha*b2, blobalpha*255);
	img.draw(0, 0, 1,1);
	// NOTE(review): glPopMatrix() has no matching glPushMatrix() in this
	// function -- presumably applyWarp(0) pushes one; confirm.
	glPopMatrix();
	
//	img.draw(0, 0);

	ofPopStyle();
}
Пример #8
0
int main(int argc, char **argv)
{
  /* Kinect demo: shows the live RGB feed, a motion-edge composited
   * output and a colourised depth view.  Keys: w/x tilt up/down one
   * step, e/c tilt +/-10, s centre, 'q' (113) quits. */
  int first = 1;
  int angle = 0;
  double duration = 5;
  IplImage *image = 0;
  IplImage *image2 = 0;
  IplImage *prev = 0;
  IplImage *output = 0;
  IplImage *depth;
  IplImage *diff = 0;
  IplImage *bw = 0;
  IplImage *edge = 0;
  IplImage *edge2 = 0;
  //  if (!prev) prev = cvCreateImageHeader(cvSize(640,480), 8, 3);
  //if (!diff) diff = cvCreateImageHeader(cvSize(640,480), 8, 3);
  diff = cvCreateImage(cvSize(640,480),8,3);
  bw = cvCreateImage(cvSize(640,480),8,1);
  edge = cvCreateImage(cvSize(640,480),8,1);
  edge2 = cvCreateImage(cvSize(640,480),8,1);
  output  = cvCreateImage(cvSize(640,480),8,3);
  cvZero( output );
  
  //cvCvtColor(output, output, CV_RGB2BGR);
  while (1) {
    switch(cvWaitKey(10)){

    case 113: /* 'q' */
      exit(0);
    case 'w':
      angle++;
      if(angle > 30)
	angle = 30;
      set_tilt_cv(angle,0);
      break;
    case 'x':
      angle--;
      if(angle < -30)
	angle = -30;
      set_tilt_cv(angle,0);
      break;
    case 's':
      angle = 0;
      set_tilt_cv(angle,0);
      break;
    case 'e':
      angle += 10;
      if(angle > 30)
	angle = 30;
      set_tilt_cv(angle,0);
      break;
    case 'c':
      angle -=10;
      if(angle < -30)
	angle = -30;
      set_tilt_cv(angle,0);
      break;
    default:
      // cvWaitKey(700);
      /* First frame: seed prev straight from the camera; later frames
       * use the clone of the previous frame. */
      if(first){
	prev = freenect_sync_get_rgb_cv(0);
	//first = 0;
      }
      else
	{
	  prev = cvCloneImage(image2);
	  cvReleaseImage(&image2); 
	}
      image  = freenect_sync_get_rgb_cv(0);
      /* NOTE(review): image is cloned before the NULL check below; if
       * the grab fails, cvCloneImage(NULL) crashes first. */
      image2  = cvCloneImage(image);
      if (!image) {
	printf("Error: Kinect not connected?\n");
	return -1;
      }
      cvCvtColor(image, image, CV_RGB2BGR);
      //      cvCvtColor(image2, image2, CV_RGB2BGR);

      /* Frame difference -> grayscale -> Canny edges (and inverse). */
      cvAbsDiff(image,prev,diff);
      cvCvtColor(diff, bw,CV_BGR2GRAY);
      cvCanny(bw, edge, 29000,30500,7);
      //      cvThreshold(bw,bw,100,254,CV_THRESH_BINARY);
      cvNot(edge,edge2);

      if(!first)
	{
	  /* Saturating subtract of 255 effectively clears output, then
	   * coloured depth is painted through the edge mask.  depth is
	   * the image grabbed on the PREVIOUS loop iteration.
	   * NOTE(review): GlViewColor(depth) is called twice here and
	   * once more below -- if it allocates a new image per call,
	   * those images leak; confirm its contract. */
	  cvSubS(output,cvScalar(255,255,255,255),output,0);
	  cvAnd(GlViewColor(depth),GlViewColor(depth),output,edge);
	  //cvRunningAvg(GlViewColor(depth),output,1,edge);
	}
      //cvRunningAvg(image,output,1,edge);

      if(!first)
	{
	  cvReleaseImage(&prev);
	}
      else
	first = 0;
      cvAddWeighted(image, .3, output, .7, 1, image);
      
      //       OverlayImage(image2, output, cvPoint(0, 0), cvScalar(0.8,0.8,0.8,0.8), cvScalar(0.2,0.2,0.2,0.2));
      /*
      CvPoint* points[1];
      CvPoint ptt[5];
      points[0] = &(ptt[0]);
      points[0][0] = cvPoint(100,100);
      points[0][1] = cvPoint(200,100);
      points[0][2] = cvPoint(150,150);
      points[0][3] = cvPoint(150,300);
      points[0][4] = cvPoint(100,250);

      int npts[1];
      npts[0]=5;
	cvPolyLine(image, points, npts, 1,1, cvScalar(100,100,100,230),1, 8,0);
	cvFillPoly(image, points, npts,1, cvScalar(100,100,100,230), 8,0);
      */
      depth = freenect_sync_get_depth_cv(0);
      /* NOTE(review): depth is dereferenced by cvSmooth before the NULL
       * check below; the check should come first if grabs can fail. */
      cvSmooth(depth,depth,CV_BLUR,18,18,2.0,2.0);
      if (!depth) {
	printf("Error: Kinect not connected?\n");
	return -1;
      }
      cvShowImage("RGB", image);
      cvShowImage("Output", output);

      cvShowImage("Depth", GlViewColor(depth));
      break;
    }
  }
}
Пример #9
0
// Subtract a constant from every pixel (saturating), then adopt the result.
void ofCvImage::operator -= ( float scalar ) {
	CvScalar offset = cvScalar( scalar );
	cvSubS( cvImage, offset, cvImageTemp );
	swapTemp();
}
Пример #10
0
//------------------------------------------------------------------------------
//------------------------------------------------------------------------------
CvMat *tgso (CvMat &tmap, int ntex, double sigma, double theta, CvMat &tsim, int useChi2) {

	/* Texture gradient at orientation theta: for every pixel, compare
	 * the texton histograms of the two half-discs of radius sigma on
	 * either side of the line at angle theta, using chi^2 (useChi2==1)
	 * or the texton similarity matrix tsim.  Returns a new h x w
	 * CV_32FC1 matrix owned by the caller. */

	CvMat *roundTmap=cvCreateMat(tmap.rows,tmap.cols,CV_32FC1);
	CvMat *comp=cvCreateMat(tmap.rows,tmap.cols,CV_32FC1);

	for (int i=0;i<tmap.rows;i++)
		for (int j=0;j<tmap.cols;j++)
			cvSetReal2D(roundTmap,i,j,cvRound(cvGetReal2D(&tmap,i,j)));

	// Texton labels must be integral.
	cvSub(&tmap,roundTmap,comp);
	if (cvCountNonZero(comp)) {
		printf("texton labels not integral");
		cvReleaseMat(&roundTmap);
		cvReleaseMat(&comp);
		exit(1);
	}

	double min,max;
	cvMinMaxLoc(&tmap,&min,&max);
	/* Labels must lie in [1,ntex]; either bound failing is fatal.  The
	 * original used && (both bounds had to fail simultaneously) and
	 * called printf(msg,...) with an uninitialised heap buffer as the
	 * format string -- undefined behaviour, plus a leak of msg. */
	if (min<1 || max>ntex) {
		printf("texton labels out of range [1,%d]",ntex);
		cvReleaseMat(&roundTmap);
		cvReleaseMat(&comp);
		exit(1);
	}

	cvReleaseMat(&roundTmap);
	cvReleaseMat(&comp);


	double wr=floor(sigma); //sigma=radius (Leo) 

	CvMat *x=cvCreateMat(1,wr-(-wr)+1, CV_64FC1);
	CvMat *y=cvCreateMat(wr-(-wr)+1,1, CV_64FC1);

	CvMat *u=cvCreateMat(wr-(-wr)+1,wr-(-wr)+1, CV_64FC1);
	CvMat *v=cvCreateMat(wr-(-wr)+1,wr-(-wr)+1, CV_64FC1);
	CvMat *gamma=cvCreateMat(u->rows,v->rows, CV_64FC1);

	// Set x,y directions 
	for (int j=-wr;j<=wr;j++) {
		cvSetReal2D(x,0,(j+wr),j);
		cvSetReal2D(y,(j+wr),0,j);
	}

	// Set u,v meshgrids.  A single cvRepeat tiles the whole matrix;
	// the original called this u->rows times inside a loop to no effect.
	cvRepeat(x,u);
	cvRepeat(y,v);

	// Compute the gamma matrix (polar angle) from the grid.
	for (int i=0;i<u->rows;i++) 
		for (int j=0;j<u->cols;j++)
			cvSetReal2D(gamma,i,j,atan2(cvGetReal2D(v,i,j),cvGetReal2D(u,i,j)));

	cvReleaseMat(&x);
	cvReleaseMat(&y);

	// Disc mask of radius sigma with the centre pixel excluded.
	CvMat *sum=cvCreateMat(u->rows,u->cols, CV_64FC1);
	cvMul(u,u,u);
	cvMul(v,v,v);
	cvAdd(u,v,sum);
	CvMat *mask=cvCreateMat(u->rows,u->cols, CV_8UC1);
	cvCmpS(sum,sigma*sigma,mask,CV_CMP_LE);
	cvConvertScale(mask,mask,1.0/255);
	cvSetReal2D(mask,wr,wr,0);
	int count=cvCountNonZero(mask);

	cvReleaseMat(&u);
	cvReleaseMat(&v);
	cvReleaseMat(&sum);

	// Split the disc into the two half-discs either side of theta.
	CvMat *sub=cvCreateMat(mask->rows,mask->cols, CV_64FC1);
	CvMat *side=cvCreateMat(mask->rows,mask->cols, CV_8UC1);

	cvSubS(gamma,cvScalar(theta),sub);
	cvReleaseMat(&gamma);

	for (int i=0;i<mask->rows;i++){
		for (int j=0;j<mask->cols;j++) {
			double n=cvmGet(sub,i,j);
			// Wrap the angle into [0, 2*pi) and pick side 1 or 2.
			double n_mod = n-floor(n/(2*M_PI))*2*M_PI;
			cvSetReal2D(side,i,j, 1 + int(n_mod < M_PI));
		}
	}

	cvMul(side,mask,side);
	cvReleaseMat(&sub);
	cvReleaseMat(&mask);

	CvMat *lmask=cvCreateMat(side->rows,side->cols, CV_8UC1);
	CvMat *rmask=cvCreateMat(side->rows,side->cols, CV_8UC1);
	cvCmpS(side,1,lmask,CV_CMP_EQ);
	cvCmpS(side,2,rmask,CV_CMP_EQ);
	int count1=cvCountNonZero(lmask), count2=cvCountNonZero(rmask);
	if (count1 != count2) {
		printf("Bug: imbalance\n");
	}

	// Normalised half-disc convolution kernels.
	CvMat *rlmask=cvCreateMat(side->rows,side->cols, CV_32FC1);
	CvMat *rrmask=cvCreateMat(side->rows,side->cols, CV_32FC1);
	cvConvertScale(lmask,rlmask,1.0/(255*count)*2);
	cvConvertScale(rmask,rrmask,1.0/(255*count)*2);


	cvReleaseMat(&lmask);
	cvReleaseMat(&rmask);
	cvReleaseMat(&side);

	int h=tmap.rows;
	int w=tmap.cols;


	CvMat *d       = cvCreateMat(h*w,ntex,CV_32FC1);
	CvMat *coltemp = cvCreateMat(h*w,1,CV_32FC1);
	CvMat *tgL     = cvCreateMat(h,w, CV_32FC1);
	CvMat *tgR     = cvCreateMat(h,w, CV_32FC1);
	CvMat *temp    = cvCreateMat(h,w,CV_8UC1);
	CvMat *im      = cvCreateMat(h,w, CV_32FC1);
	CvMat *sub2    = cvCreateMat(h,w,CV_32FC1);
	CvMat *sub2t   = cvCreateMat(w,h,CV_32FC1);
	CvMat *prod    = cvCreateMat(h*w,ntex,CV_32FC1);
	CvMat reshapehdr,*reshape;

	CvMat* tgL_pad = cvCreateMat(h+rlmask->rows-1,w+rlmask->cols-1,CV_32FC1);
	CvMat* tgR_pad = cvCreateMat(h+rlmask->rows-1,w+rlmask->cols-1,CV_32FC1);
	CvMat* im_pad  = cvCreateMat(h+rlmask->rows-1,w+rlmask->cols-1,CV_32FC1);

	CvMat *tg=cvCreateMat(h,w,CV_32FC1);
	cvZero(tg);
	
	if (useChi2 == 1){
		// Chi-squared distance between left/right half-disc histograms,
		// accumulated over all ntex texton channels.
		CvMat* temp_add1 = cvCreateMat(h,w,CV_32FC1);
		for (int i=0;i<ntex;i++) {
			// Indicator image of texton label i+1.
			cvCmpS(&tmap,i+1,temp,CV_CMP_EQ); 
			cvConvertScale(temp,im,1.0/255);

			cvCopyMakeBorder(tgL,tgL_pad,cvPoint((rlmask->cols-1)/2,(rlmask->rows-1)/2),IPL_BORDER_CONSTANT);
			cvCopyMakeBorder(tgR,tgR_pad,cvPoint((rlmask->cols-1)/2,(rlmask->rows-1)/2),IPL_BORDER_CONSTANT);
			cvCopyMakeBorder(im,im_pad,cvPoint((rlmask->cols-1)/2,(rlmask->rows-1)/2),IPL_BORDER_CONSTANT);

			cvFilter2D(im_pad,tgL_pad,rlmask,cvPoint((rlmask->cols-1)/2,(rlmask->rows-1)/2));
			cvFilter2D(im_pad,tgR_pad,rrmask,cvPoint((rlmask->cols-1)/2,(rlmask->rows-1)/2));

			cvGetSubRect(tgL_pad,tgL,cvRect((rlmask->cols-1)/2,(rlmask->rows-1)/2,tgL->cols,tgL->rows));
			cvGetSubRect(tgR_pad,tgR,cvRect((rlmask->cols-1)/2,(rlmask->rows-1)/2,tgR->cols,tgR->rows));

			// (L-R)^2 / (L+R+eps), summed into tg.
			cvSub(tgL,tgR,sub2);
			cvPow(sub2,sub2,2.0);
			cvAdd(tgL,tgR,temp_add1);
			cvAddS(temp_add1,cvScalar(0.0000000001),temp_add1);
			cvDiv(sub2,temp_add1,sub2);
			cvAdd(tg,sub2,tg);
		}
		cvScale(tg,tg,0.5);

		cvReleaseMat(&temp_add1);

	}
	else{// if not chi^2
		// Per-channel |L-R| differences combined through the texton
		// similarity matrix tsim: tg = rowsum((d * tsim) .* d).
		for (int i=0;i<ntex;i++) {
			cvCmpS(&tmap,i+1,temp,CV_CMP_EQ); 
			cvConvertScale(temp,im,1.0/255);

			cvCopyMakeBorder(tgL,tgL_pad,cvPoint((rlmask->cols-1)/2,(rlmask->rows-1)/2),IPL_BORDER_CONSTANT);
			cvCopyMakeBorder(tgR,tgR_pad,cvPoint((rlmask->cols-1)/2,(rlmask->rows-1)/2),IPL_BORDER_CONSTANT);
			cvCopyMakeBorder(im,im_pad,cvPoint((rlmask->cols-1)/2,(rlmask->rows-1)/2),IPL_BORDER_CONSTANT);

			cvFilter2D(im_pad,tgL_pad,rlmask,cvPoint((rlmask->cols-1)/2,(rlmask->rows-1)/2));
			cvFilter2D(im_pad,tgR_pad,rrmask,cvPoint((rlmask->cols-1)/2,(rlmask->rows-1)/2));

			cvGetSubRect(tgL_pad,tgL,cvRect((rlmask->cols-1)/2,(rlmask->rows-1)/2,tgL->cols,tgL->rows));
			cvGetSubRect(tgR_pad,tgR,cvRect((rlmask->cols-1)/2,(rlmask->rows-1)/2,tgR->cols,tgR->rows));

			cvSub(tgL,tgR,sub2);
			cvAbs(sub2,sub2);
			cvTranspose(sub2,sub2t);
			reshape=cvReshape(sub2t,&reshapehdr,0,h*w);
			cvGetCol(d,coltemp,i);
			cvCopy(reshape,coltemp);
		}

		cvMatMul(d,&tsim,prod);
		cvMul(prod,d,prod);


		CvMat *sumcols=cvCreateMat(h*w,1,CV_32FC1);
		cvSetZero(sumcols);
		for (int i=0;i<prod->cols;i++) {
			cvGetCol(prod,coltemp,i);
			cvAdd(sumcols,coltemp,sumcols);
		}

		reshape=cvReshape(sumcols,&reshapehdr,0,w);
		cvTranspose(reshape,tg);

		cvReleaseMat(&sumcols);
	}


	//Smooth the gradient now!!
	// NOTE(review): fitparab appears to return a new matrix; the tg
	// allocated above may be leaked by this reassignment -- confirm
	// fitparab's ownership contract before releasing it here.
	tg=fitparab(*tg,sigma,sigma/4,theta);
	cvMaxS(tg,0,tg); 

	
	cvReleaseMat(&im_pad);
	cvReleaseMat(&tgL_pad);
	cvReleaseMat(&tgR_pad);
	cvReleaseMat(&rlmask);
	cvReleaseMat(&rrmask);
	cvReleaseMat(&im);
	cvReleaseMat(&tgL);
	cvReleaseMat(&tgR);
	cvReleaseMat(&temp);
	cvReleaseMat(&coltemp);
	cvReleaseMat(&sub2);
	cvReleaseMat(&sub2t);
	cvReleaseMat(&d);
	cvReleaseMat(&prod);

	return tg;

}
Пример #11
0
//--------------------------------------------------------------------------------
// Subtract `value` from all three channels of every pixel (saturating).
void ofxCvColorImage::operator -= ( float value ) {
	CvScalar offset = cvScalar( value, value, value );
	cvSubS( cvImage, offset, cvImageTemp );
	swapTemp();
	flagImageChanged();
}
Пример #12
0
int main(int argc, char **argv)
{
  bool isStop = false;
  const int INIT_TIME = 50;
  const double BG_RATIO = 0.02; // 背景領域更新レート
  const double OBJ_RATIO = 0.005; // 物体領域更新レート
  const double Zeta = 10.0;
  IplImage *img = NULL;

  CvCapture *capture = NULL;
  capture = cvCreateCameraCapture(0);
  //capture = cvCaptureFromAVI("test.avi");
  if(capture == NULL){
    printf("capture device not found!!");
    return -1;
  }

  img = cvQueryFrame(capture);
  int w = img->width;
  int h = img->height;

  IplImage *imgAverage = cvCreateImage(cvSize(w, h), IPL_DEPTH_32F, 3);
  IplImage *imgSgm = cvCreateImage(cvSize(w, h), IPL_DEPTH_32F, 3);
  IplImage *imgTmp = cvCreateImage(cvSize(w, h), IPL_DEPTH_32F, 3);
  IplImage *img_lower = cvCreateImage(cvSize(w, h), IPL_DEPTH_32F, 3);
  IplImage *img_upper = cvCreateImage(cvSize(w, h), IPL_DEPTH_32F, 3);
  IplImage *imgSilhouette = cvCreateImage(cvSize(w, h), IPL_DEPTH_8U, 1);
  IplImage *imgSilhouetteInv = cvCreateImage(cvSize(w, h), IPL_DEPTH_8U, 1);
  IplImage *imgResult = cvCreateImage(cvSize(w, h), IPL_DEPTH_8U, 3);

  printf("背景初期化中...\n");
  cvSetZero(imgAverage);
  for(int i = 0; i < INIT_TIME; i++){
    img = cvQueryFrame(capture);
    cvAcc(img, imgAverage);
    printf("輝度平均 %d/%d\n", i, INIT_TIME);
  }
  cvConvertScale(imgAverage, imgAverage, 1.0 / INIT_TIME);
  cvSetZero(imgSgm);
  for(int i = 0; i < INIT_TIME; i++){
    img = cvQueryFrame(capture);
    cvConvert(img, imgTmp);
    cvSub(imgTmp, imgAverage, imgTmp);
    cvPow(imgTmp, imgTmp, 2.0);
    cvConvertScale(imgTmp, imgTmp, 2.0);
    cvPow(imgTmp, imgTmp, 0.5);
    cvAcc(imgTmp, imgSgm);
    printf("輝度振幅 %d/%d\n", i, INIT_TIME);
  }
  cvConvertScale(imgSgm, imgSgm, 1.0 / INIT_TIME);
  printf("背景初期化完了\n");

  char winNameCapture[] = "Capture";
  char winNameSilhouette[] = "Silhouette";
  cvNamedWindow(winNameCapture, CV_WINDOW_AUTOSIZE);
  cvNamedWindow(winNameSilhouette, CV_WINDOW_AUTOSIZE);

  while(1){
    if(!isStop){
      img = cvQueryFrame(capture);
      if(img == NULL) break;
      cvConvert(img, imgTmp);

      // 輝度範囲
      cvSub(imgAverage, imgSgm, img_lower);
      cvSubS(img_lower, cvScalarAll(Zeta), img_lower);
      cvAdd(imgAverage, imgSgm, img_upper);
      cvAddS(img_upper, cvScalarAll(Zeta), img_upper);
      cvInRange(imgTmp, img_lower, img_upper, imgSilhouette);

      // 輝度振幅
      cvSub(imgTmp, imgAverage, imgTmp);
      cvPow(imgTmp, imgTmp, 2.0);
      cvConvertScale(imgTmp, imgTmp, 2.0);
      cvPow(imgTmp, imgTmp, 0.5);

      // 背景領域を更新
      cvRunningAvg(img, imgAverage, BG_RATIO, imgSilhouette);
      cvRunningAvg(imgTmp, imgSgm, BG_RATIO, imgSilhouette);

      // 物体領域を更新
      cvNot(imgSilhouette, imgSilhouetteInv);
      cvRunningAvg(imgTmp, imgSgm, OBJ_RATIO, imgSilhouetteInv);

      cvErode(imgSilhouette, imgSilhouette, NULL, 1); // 収縮
      cvDilate(imgSilhouette, imgSilhouette, NULL, 2); // 膨張
      cvErode(imgSilhouette, imgSilhouette, NULL, 1); // 収縮

      cvMerge(imgSilhouette, imgSilhouette, imgSilhouette, NULL, imgResult);
      cvShowImage(winNameCapture, img);
      cvShowImage(winNameSilhouette, imgResult);
    }
    int waitKey = cvWaitKey(33);
    if(waitKey == 'q') break;
    if(waitKey == ' '){
      isStop = !isStop;
      if(isStop) printf("stop\n");
      else printf("start\n");
    }
  }

  cvReleaseCapture(&capture);
  cvDestroyWindow(winNameCapture);
  cvDestroyWindow(winNameSilhouette);

  return 0;
}
Пример #13
0
 void cvSubS_wrap(const CvArr * src , CvScalar value , CvArr * dst , const CvArr * mask ){
	cvSubS(/*const*//*CvArr*//***/src , /*CvScalar*/value , /*CvArr*//***/dst , /*const*//*CvArr*//***/mask);
}
Пример #14
0
void FaceNormIllu::do_NormIlluRETINA(IplImage *img_64, IplImage *dst, const double Thr){

    // Retina-inspired illumination normalisation: two light-adaptation
    // stages (compression against a local mean), a DoG filter
    // (sigma 0.5 vs 4), truncation at +/-Thr, and a final rescale of
    // dst to [0,1].  img_64 and dst are expected to be IPL_DEPTH_64F
    // single-channel images of the same size.
    const CvSize fsize=cvGetSize(img_64);

//    IplImage *img_64=cvCreateImage(fsize, IPL_DEPTH_64F, 1);

    IplImage *img_gauss_sigma_1=cvCreateImage(fsize, IPL_DEPTH_64F, 1);
    IplImage *img_gauss_sigma_2=cvCreateImage(fsize, IPL_DEPTH_64F, 1);

    IplImage *F1=cvCreateImage(fsize, IPL_DEPTH_64F, 1);
    IplImage *F2=cvCreateImage(fsize, IPL_DEPTH_64F, 1);

    IplImage *I_la1=cvCreateImage(fsize, IPL_DEPTH_64F, 1);
    IplImage *I_la2=cvCreateImage(fsize, IPL_DEPTH_64F, 1);

    IplImage *Ibip=cvCreateImage(fsize, IPL_DEPTH_64F, 1);
    //IplImage *Inorm=cvCreateImage(fsize, IPL_DEPTH_64F, 1);

//    cvConvert(img, img_64);

    //const double mI1=cvMean(img_64)*0.5;
    const double mI1 = cvAvg(img_64).val[0] * 0.5;

    // NxN, N=6*sigma + 1, where NxN is the gaussian smooth filter
    cvSmooth(img_64, img_gauss_sigma_1, CV_GAUSSIAN, 7, 7, 1, 0);

    // Local adaptation factor F1 = gauss(img) + mean(img)/2.
    cvAddS(img_gauss_sigma_1, cvScalar(mI1), F1, 0);

    double minV, maxV;
    cvMinMaxLoc(img_64, 0, &maxV);

    //	#pragma omp parallel for

    // First adaptation stage: I1 = (max + F1) * I / (I + F1), pixel-wise.
    for (int i=0; i<fsize.height; i++){
        double *ptr_img_64=(double*)(img_64->imageData+i*img_64->widthStep);
        double *ptr_F1=(double*)(F1->imageData+i*F1->widthStep);
        double *ptr_I_la1=(double*)(I_la1->imageData+i*I_la1->widthStep);
        for (int j=0; j<fsize.width; j++){
            const double vImg64=ptr_img_64[j];
            const double vF1=ptr_F1[j];
            ptr_I_la1[j]=(maxV+vF1)*vImg64/(vImg64+vF1);
        }
    }

    cvSmooth(I_la1, img_gauss_sigma_2, CV_GAUSSIAN, 19, 19, 3, 0);

//    const double mI2=cvMean(I_la1)*0.5;
    // Second-stage factor is derived from the FIRST stage's output, as
    // the commented-out original shows; the previous code passed img_64
    // here (copy/paste slip).
    const double mI2 = cvAvg(I_la1).val[0] * 0.5;

    cvAddS(img_gauss_sigma_2, cvScalar(mI2), F2, 0);

    cvMinMaxLoc(I_la1, 0, &maxV);

    //#pragma omp parallel for

    // Second adaptation stage: I2 = (max + F2) * I1 / (I1 + F2).
    for (int i=0; i<fsize.height; i++){
        double *ptr_I_la1=(double*)(I_la1->imageData+i*I_la1->widthStep);
        double *ptr_F2=(double*)(F2->imageData+i*F2->widthStep);
        double *ptr_I_la2=(double*)(I_la2->imageData+i*I_la2->widthStep);
        for (int j=0; j<fsize.width; j++){
            const double vI_la1=ptr_I_la1[j];
            const double vF2=ptr_F2[j];
            ptr_I_la2[j]=(maxV+vF2)*vI_la1/(vI_la1+vF2);
        }
    }

    //// DoG filter, sigma1=0.5, sigma2=4
    cvSmooth(I_la2, img_gauss_sigma_1, CV_GAUSSIAN, 3, 3, 0.5, 0);

    cvSmooth(I_la2, img_gauss_sigma_2, CV_GAUSSIAN, 25, 25, 4, 0);

    cvSub(img_gauss_sigma_1, img_gauss_sigma_2, Ibip,0);

    double meanV, stdV;
//    cvMean_StdDev(Ibip, &meanV, &stdV, 0);
    CvScalar _avgV, _stdV;
    cvAvgSdv(Ibip, &_avgV, &_stdV);
    meanV = _avgV.val[0];
    stdV = _stdV.val[0];

    // Normalise by the standard deviation, then clamp to [-Thr, Thr].
    cvConvertScale(Ibip, dst, 1./stdV, 0);
    //#pragma omp parallel for
    for (int i=0; i<fsize.height; i++){
        double *ptr_Inorm=(double*)(dst->imageData+i*dst->widthStep);
        for (int j=0; j<fsize.width; j++){
            const double vInorm=ptr_Inorm[j];
            //printf("%f	",vInorm);
            if (vInorm>=0){
                ptr_Inorm[j]=MIN(Thr, fabs(vInorm));
            }
            else{
                ptr_Inorm[j]=-MIN(Thr, fabs(vInorm));
            }
        }
    }

//    cvReleaseImage(&img_64);
    cvReleaseImage(&img_gauss_sigma_1);
    cvReleaseImage(&img_gauss_sigma_2);
    cvReleaseImage(&F1);
    cvReleaseImage(&F2);
    cvReleaseImage(&I_la1);
    cvReleaseImage(&I_la2);

    cvReleaseImage(&Ibip);

    // rescale the values to [0.0, 1.0]
    cvMinMaxLoc(dst, &minV, &maxV);
    cvSubS(dst, cvScalar(minV), dst);
    cvConvertScale(dst, dst, 1.0/(maxV - minV));

}
Пример #15
0
static GstFlowReturn
gst_motiondetect_transform_ip (GstBaseTransform * trans, GstBuffer * buf)
{
  /* In-place transform: compares each frame against the previous one
   * (absdiff under an optional mask), posts a "motiondetect" element
   * message with the result, and optionally shades the frame. */
  GstMessage *m = NULL;
  StbtMotionDetect *filter = GST_MOTIONDETECT (trans);
  if ((!filter) || (!buf)) {
    return GST_FLOW_OK;
  }

  GST_OBJECT_LOCK(filter);
  if (filter->enabled && filter->state != MOTION_DETECT_STATE_INITIALISING) {
    IplImage *referenceImageGrayTmp = NULL;
    /* NOTE(review): function-local static -- shared by all instances and
     * not thread-safe; confirm only one instance processes at a time. */
    static int frameNo = 1;

    /* Wrap the incoming buffer in the pre-allocated header and convert
     * to grayscale for comparison. */
    filter->cvCurrentImage->imageData = (char *) GST_BUFFER_DATA (buf);
    cvCvtColor( filter->cvCurrentImage,
        filter->cvCurrentImageGray, CV_BGR2GRAY );

    if (filter->debugDirectory) {
      gst_motiondetect_log_image (filter->cvCurrentImageGray, 
          filter->debugDirectory, frameNo, "source.png");
    }

    if (filter->state == MOTION_DETECT_STATE_REFERENCE_IMAGE_ACQUIRED) {
      gboolean result;
      
      result = gst_motiondetect_apply(
          filter->cvReferenceImageGray, filter->cvCurrentImageGray,
          filter->cvMaskImage, filter->noiseThreshold);

      if (filter->debugDirectory) {
        if (result) {
          gst_motiondetect_log_image (filter->cvReferenceImageGray, 
              filter->debugDirectory, frameNo,
              "absdiff_not_masked_motion.png");
        } else {
          gst_motiondetect_log_image (filter->cvReferenceImageGray, 
              filter->debugDirectory, frameNo,
              "absdiff_not_masked_no_motion.png");
        }
        gst_motiondetect_log_image (filter->cvMaskImage,
              filter->debugDirectory, frameNo, "mask.png");
      }

      GstStructure *s = gst_structure_new ("motiondetect",
          "has_motion", G_TYPE_BOOLEAN, result,
          "masked", G_TYPE_BOOLEAN, (filter->mask != NULL),
          "mask_path", G_TYPE_STRING, filter->mask, NULL);
      m = gst_message_new_element (GST_OBJECT (filter), s);

      if (filter->display) {
        /* Darken the area outside the mask; flash red when motion hit.
         * NOTE(review): gst_buffer_make_writable() may return a copy,
         * but cvCurrentImage->imageData still points at the ORIGINAL
         * buffer's data -- the drawing below may not reach the buffer
         * that is pushed downstream; confirm. */
        buf = gst_buffer_make_writable (buf);
        cvSubS (filter->cvCurrentImage, CV_RGB(100, 100, 100),
            filter->cvCurrentImage, filter->cvInvertedMaskImage);
        if (result) {
          cvAddS (filter->cvCurrentImage, CV_RGB(50, 0, 0),
              filter->cvCurrentImage, filter->cvMaskImage);
        }
      }
    }

    /* Swap current and reference grayscale images so the next frame is
     * compared against this one. */
    referenceImageGrayTmp = filter->cvReferenceImageGray;
    filter->cvReferenceImageGray = filter->cvCurrentImageGray;
    filter->cvCurrentImageGray = referenceImageGrayTmp;
    filter->state = MOTION_DETECT_STATE_REFERENCE_IMAGE_ACQUIRED;
    ++frameNo;
  }
  GST_OBJECT_UNLOCK(filter);

  /* Post outside the object lock to avoid re-entrancy deadlocks. */
  if (m) {
    gst_element_post_message (GST_ELEMENT (filter), m);
  }

  return GST_FLOW_OK;
}
void ImageProcessing::getDisparity() {

    // Block-matching stereo disparity: zero-mean normalised cross
    // correlation (NCC) over square windows, searching DSR candidate
    // disparities to the left; writes the result to disparity.pgm.
    int windowSize = 9; // NCC window side length (odd)
    int DSR = 20;       // disparity search range
    IplImage *LeftinputImage = cvLoadImage("../outputs/raw/left-0.pgm", 0);
    IplImage *RightinputImage = cvLoadImage("../outputs/raw/right-0.pgm", 0);
    if (LeftinputImage == NULL || RightinputImage == NULL) {
        // Guard: the original dereferenced a NULL image on load failure.
        cout << "could not load input image pair" << endl;
        cvReleaseImage(&LeftinputImage);  // safe if the pointer is NULL
        cvReleaseImage(&RightinputImage);
        return;
    }

    /****************8U to 32F**********************/
    IplImage *LeftinputImage32 = cvCreateImage(cvSize(LeftinputImage->width, LeftinputImage->height), 32, 1); // 32 == IPL_DEPTH_32F
    IplImage *RightinputImage32 = cvCreateImage(cvSize(LeftinputImage->width, LeftinputImage->height), 32, 1);
    cvConvertScale(LeftinputImage, LeftinputImage32, 1 / 255.);
    cvConvertScale(RightinputImage, RightinputImage32, 1 / 255.);

    int offset = floor((double) windowSize / 2);
    int height = LeftinputImage32->height;
    int width = LeftinputImage32->width;
    double *localNCC = new double[DSR];

    int x = 0, y = 0, d = 0, m = 0;
    int N = windowSize;

    IplImage *leftWinImg = cvCreateImage(cvSize(N, N), 32, 1);
    IplImage *rightWinImg = cvCreateImage(cvSize(N, N), 32, 1);
    IplImage *disparity = cvCreateImage(cvSize(width, height), 8, 1);
    BwImage imgA(disparity);

    // Start from an all-zero disparity map.
    for (y = 0; y < height; y++) {
        for (x = 0; x < width; x++) {
            imgA[y][x] = 0;
        }
    }

    CvScalar s1;
    CvScalar s2;
    for (y = 0; y < height - N; y++) {
        for (x = 0; x < width - N; x++) {
            // Zero-mean, L2-normalised left window at (x, y).
            cvSetImageROI(LeftinputImage32, cvRect(x, y, N, N));
            s1 = cvAvg(LeftinputImage32, NULL);
            cvSubS(LeftinputImage32, s1, leftWinImg, NULL);
            cvNormalize(leftWinImg, leftWinImg, 1, 0, CV_L2, NULL);
            d = 0;

            // Reset the per-pixel NCC scores.
            for (m = 0; m < DSR; m++) {
                localNCC[m] = 0;
            }

            do {
                if (x - d >= 0) {
                    // Zero-mean, L2-normalised right window shifted by d.
                    cvSetImageROI(RightinputImage32, cvRect(x - d, y, N, N));
                    s2 = cvAvg(RightinputImage32, NULL);
                    cvSubS(RightinputImage32, s2, rightWinImg, NULL);
                    cvNormalize(rightWinImg, rightWinImg, 1, 0, CV_L2, NULL);
                } else {
                    break;
                }
                localNCC[d] = cvDotProduct(leftWinImg, rightWinImg);
                cvResetImageROI(RightinputImage32);
                d++;
            } while (d < DSR); // was d <= DSR: wrote localNCC[DSR], one past the end

            // Best-scoring disparity, scaled for display.
            imgA[y + offset][x + offset] = getMaxMin(localNCC, DSR, 1) *16;
            cvResetImageROI(LeftinputImage32);
        } //x
        if (y % 10 == 0)
            cout << "row=" << y << " of " << height << endl;
    } //y

    cvReleaseImage(&leftWinImg);
    cvReleaseImage(&rightWinImg);

    cvSaveImage("disparity.pgm", disparity);
    waitHere();
    cout << "Displaying Disparity image" << endl;

    // Free remaining resources (all previously leaked).
    delete[] localNCC;
    cvReleaseImage(&disparity);
    cvReleaseImage(&LeftinputImage32);
    cvReleaseImage(&RightinputImage32);
    cvReleaseImage(&LeftinputImage);
    cvReleaseImage(&RightinputImage);
}
Пример #17
0
/* FFI glue: rebuilds a CvScalar from its unpacked components (the
 * project's CV_SCALAR_DECL / CV_SCALAR macros) and forwards to cvSubS. */
void cvSubS_glue(const CvArr *src, CV_SCALAR_DECL(value), CvArr *dest,
		 const CvArr *mask)
{
    cvSubS(src, CV_SCALAR(value), dest, mask);
}
Пример #18
0
/* Binding shim: dereferences the boxed scalar and forwards to cvSubS. */
void cv_SubS(CvArr* src, CvScalar* value, CvArr* dst, CvArr* mask) {
  cvSubS(src, *value, dst, mask);
}
Пример #19
0
// chain function - this function does the actual processing:
// updates the FGD background model for each buffer and optionally emits
// custom downstream events carrying the foreground mask and the merged
// bounding boxes (ROIs) of the foreground regions.
static GstFlowReturn
gst_bgfg_acmmm2003_chain(GstPad *pad, GstBuffer *buf)
{
    GstBgFgACMMM2003 *filter;

    // sanity checks
    g_return_val_if_fail(pad != NULL, GST_FLOW_ERROR);
    g_return_val_if_fail(buf != NULL, GST_FLOW_ERROR);

    filter = GST_BGFG_ACMMM2003(GST_OBJECT_PARENT(pad));

    // Wrap the incoming buffer's pixels in the pre-allocated IplImage header.
    filter->image->imageData = (gchar*) GST_BUFFER_DATA(buf);

    // the bg model must be initialized with a valid image; thus we delay its
    // creation until the chain function
    if (filter->model == NULL) {
        filter->model = cvCreateFGDStatModel(filter->image, NULL);

        ((CvFGDStatModel*)filter->model)->params.minArea           = filter->min_area;
        ((CvFGDStatModel*)filter->model)->params.erode_iterations  = filter->n_erode_iterations;
        ((CvFGDStatModel*)filter->model)->params.dilate_iterations = filter->n_dilate_iterations;

        return gst_pad_push(filter->srcpad, buf);
    }

    cvUpdateBGStatModel(filter->image, filter->model, -1);

    // send mask event, if requested
    if (filter->send_mask_events) {
        GstStructure *structure;
        GstEvent     *event;
        GArray       *data_array;
        IplImage     *mask;

        // prepare and send custom event with the mask surface
        mask = filter->model->foreground;
        data_array = g_array_sized_new(FALSE, FALSE, sizeof(mask->imageData[0]), mask->imageSize);
        g_array_append_vals(data_array, mask->imageData, mask->imageSize);

        // NOTE(review): the structure carries a raw pointer to data_array,
        // which is unreffed right after the event is pushed below --
        // presumably the downstream handler copies it synchronously
        // during gst_pad_push_event; confirm.
        structure = gst_structure_new("bgfg-mask",
                                      "data",      G_TYPE_POINTER, data_array,
                                      "width",     G_TYPE_UINT,    mask->width,
                                      "height",    G_TYPE_UINT,    mask->height,
                                      "depth",     G_TYPE_UINT,    mask->depth,
                                      "channels",  G_TYPE_UINT,    mask->nChannels,
                                      "timestamp", G_TYPE_UINT64,  GST_BUFFER_TIMESTAMP(buf),
                                      NULL);

        event = gst_event_new_custom(GST_EVENT_CUSTOM_DOWNSTREAM, structure);
        gst_pad_push_event(filter->srcpad, event);
        g_array_unref(data_array);

        if (filter->display) {
            // shade the regions not selected by the acmmm2003 algorithm:
            // invert the mask, darken through it, then restore the mask
            cvXorS(mask,          CV_RGB(255, 255, 255), mask,          NULL);
            cvSubS(filter->image, CV_RGB(191, 191, 191), filter->image, mask);
            cvXorS(mask,          CV_RGB(255, 255, 255), mask,          NULL);
        }
    }

    if (filter->send_roi_events) {
        CvSeq        *contour;
        CvRect       *bounding_rects;
        guint         i, j, n_rects;

        // count # of contours, allocate array to store the bounding rectangles
        for (contour = filter->model->foreground_regions, n_rects = 0;
             contour != NULL;
             contour = contour->h_next, ++n_rects);

        bounding_rects = g_new(CvRect, n_rects);

        for (contour = filter->model->foreground_regions, i = 0; contour != NULL; contour = contour->h_next, ++i)
            bounding_rects[i] = cvBoundingRect(contour, 0);

        // Merge overlapping rectangles pairwise; merged-away entries are
        // zeroed (NULL_RECT) and skipped from then on.
        // NOTE(review): collapsing j into i can create a new overlap with
        // an earlier rectangle that is not re-checked -- presumably an
        // accepted approximation here.
        for (i = 0; i < n_rects; ++i) {
            // skip collapsed rectangles
            if ((bounding_rects[i].width == 0) || (bounding_rects[i].height == 0)) continue;

            for (j = (i + 1); j < n_rects; ++j) {
                // skip collapsed rectangles
                if ((bounding_rects[j].width == 0) || (bounding_rects[j].height == 0)) continue;

                if (rect_overlap(bounding_rects[i], bounding_rects[j])) {
                    bounding_rects[i] = rect_collapse(bounding_rects[i], bounding_rects[j]);
                    bounding_rects[j] = NULL_RECT;
                }
            }
        }

        // Emit one "bgfg-roi" event per surviving rectangle.
        for (i = 0; i < n_rects; ++i) {
            GstEvent     *event;
            GstStructure *structure;
            CvRect        r;

            // skip collapsed rectangles
            r = bounding_rects[i];
            if ((r.width == 0) || (r.height == 0)) continue;

            structure = gst_structure_new("bgfg-roi",
                                          "x",         G_TYPE_UINT,   r.x,
                                          "y",         G_TYPE_UINT,   r.y,
                                          "width",     G_TYPE_UINT,   r.width,
                                          "height",    G_TYPE_UINT,   r.height,
                                          "timestamp", G_TYPE_UINT64, GST_BUFFER_TIMESTAMP(buf),
                                          NULL);

            event = gst_event_new_custom(GST_EVENT_CUSTOM_DOWNSTREAM, structure);
            gst_pad_send_event(filter->sinkpad, event);

            if (filter->verbose)
                GST_INFO("[roi] x: %d, y: %d, width: %d, height: %d\n",
                         r.x, r.y, r.width, r.height);

            if (filter->display)
                cvRectangle(filter->image, cvPoint(r.x, r.y), cvPoint(r.x + r.width, r.y + r.height),
                            CV_RGB(0, 0, 255), 1, 0, 0);
        }

        g_free(bounding_rects);
    }

    if (filter->display)
        gst_buffer_set_data(buf, (guchar*) filter->image->imageData, filter->image->imageSize);

    return gst_pad_push(filter->srcpad, buf);
}