Example #1
/*!
 * \brief Generate a training set from a prototype sequence.
 *
 * The training set is created by adding Gaussian noise to each point of the
 * prototype sequence.
 *
 * \param[in]   gesture  gesture prototype
 * \param[in]   num      number of sequences in the training set
 * \return      training set
 */
CvMat* make_training_set (ptseq gesture, int num)
{
	ptseq *tmp;
	CvMat *training;

	rng_state = cvRNG(-1);
	tmp = (ptseq*)malloc(num * sizeof(ptseq));
	
	int i;
	for (i=0; i<num; i++) {
		tmp[i] = ptseq_init();

		add_awgn(gesture, tmp+i);
	}

	training = parametriz_training_set(tmp, num);

	return training;
}
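
A hypothetical sketch of the noise step (the real add_awgn is not shown on this page; the sketch merely assumes it perturbs each prototype point with zero-mean Gaussian noise drawn from the shared rng_state global):

	static void add_awgn_sketch(const CvPoint2D32f* proto, CvPoint2D32f* noisy,
	                            int n, double sigma)
	{
	    CvMat noise = cvMat(1, n, CV_32FC2, noisy);
	    /* fill the output with N(0, sigma) samples, then add the prototype */
	    cvRandArr(&rng_state, &noise, CV_RAND_NORMAL,
	              cvScalarAll(0), cvScalarAll(sigma));
	    for (int i = 0; i < n; i++) {
	        noisy[i].x += proto[i].x;
	        noisy[i].y += proto[i].y;
	    }
	}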
Rect GetSmokeResult(Mat &src,Rect PedestrianRect,vector<Rect> helmetRectFirst)
{
	CvRNG rng;
	rng= cvRNG(cvGetTickCount());
	
	Rect HogHeadRect;
	vector<Rect> smokeRect;

	for(int j=0;j<helmetRectFirst.size();j++)
		{
			if(helmetRectFirst[j].width<10||helmetRectFirst[j].height<10)
			{
				continue;
			}else{
				HogHeadRect =GetHeadRect(helmetRectFirst[j]);
				vector<Rect> faces=GetFaceRect(src,HogHeadRect);
				
				for(int k=0;k<faces.size();k++)
				{
				//	smokeRect = GetSmokeRect(src,faces[k]);
					// detect the face and recognize the name
					ExpandRectDifSize(src,faces[k],0,0);
					Mat faceImg = src(faces[k]);
					smokeRect = GetSmokeRect(faceImg,faces[k]);
					
					
					if(smokeRect.size()>0)
					{						
						rectangle(src,PedestrianRect,CV_RGB(255,0,0),3);
						string smokestr ="someone is smoking";
				//		putText(src, smokestr, Point(faces[k].x+faces[k].width,faces[k].y+faces[k].height), FONT_HERSHEY_COMPLEX, 1, Scalar(0, 0, 255), 3);
				//		imwrite("D://Error//"+to_string((long double)rng)+"smokeError.jpg",src);
						return PedestrianRect;
					}
				}
			}
	}
	return Rect(0,0,0,0);

}
Example #3
CvLSH* cvCreateLSH(CvLSHOperations* ops, int d, int L, int k, int type, double r, int64 seed) {
  CvLSH* lsh = 0;
  CvRNG rng = cvRNG(seed);

  if (type != CV_32FC1 && type != CV_64FC1)
    CV_Error(CV_StsUnsupportedFormat, "vectors must be either CV_32FC1 or CV_64FC1");
  lsh = new CvLSH;
  try
  {
      lsh->type = type;
      switch (type) {
      case CV_32FC1: lsh->u.lsh_32f = new lsh_pstable_l2_32f(ops, d, L, k, r, rng); break;
      case CV_64FC1: lsh->u.lsh_64f = new lsh_pstable_l2_64f(ops, d, L, k, r, rng); break;
      }
  }
  catch(...)
  {
      delete lsh;
      throw;
  }

  return lsh;
}
Example #4
/**
 * Creates a matrix for training, feeding it from the images found in the
 * folder ./training; only JPG images are taken into account, and all shapes
 * in those images are classified according to the first letter of the picture.
 */
void train() {
    training = true;
    mostrar = (flags & SH_T) != 0;
    listFiles(DIR_TR,training_image);
    fillMatrix();

    //printMatrix(t_data);
    //printMatrix(t_resp);

    CvMat *vartype = cvCreateMat( t_data->cols + 1, 1, CV_8U );
    unsigned char *vtype = vartype->data.ptr;

    //Types of the tree's input variables
    vtype[0]=CV_VAR_NUMERICAL;
    vtype[1]=CV_VAR_NUMERICAL;
    vtype[2]=CV_VAR_NUMERICAL;
    vtype[3]=CV_VAR_NUMERICAL;

    vtype[4]=CV_VAR_CATEGORICAL; //Type of the tree's output

    if ((flags & F_CHK) != 0) {
        trainMask = cvCreateMat(t_data->rows, 1, CV_8U);
        unsigned char *x = trainMask->data.ptr;
        CvRNG seed = cvRNG(time(0));
        for (int i=0; i<t_data->rows; i++,x++) {
            double p = cvRandReal(&seed);
            if (p < probTrain) {
                *x = 1;
            } else {
                *x=0;
            }
        }
    }

    ptree = new CvDTree;
    ptree->train(t_data,CV_ROW_SAMPLE,t_resp,0,trainMask,vartype,0,CvDTreeParams());
}
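
A hypothetical follow-up (not part of the original source): once train() returns, a single feature row could be classified with the tree; this assumes t_data carries the four numeric columns implied by the vartype setup above.

    CvMat* sample = cvCreateMat(1, 4, CV_32FC1);
    /* ... fill sample->data.fl[0..3] with the shape descriptors ... */
    CvDTreeNode* node = ptree->predict(sample, 0, false);
    double predictedClass = node->value;   /* the categorical response */
    cvReleaseMat(&sample);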
int main(int argc, const char * argv[]) {
    
    IplImage* img = cvCreateImage( cvSize( 500, 500 ), 8, 3 );
    CvRNG rng = cvRNG(-1);
    
    cvNamedWindow( "fitline", 1 );
    for(;;) {
        char key;
        int i, count = cvRandInt(&rng) % 100 + 1, outliers = count/5;
        float a = cvRandReal(&rng) * 200;
        float b = cvRandReal(&rng) * 40;
        float angle = cvRandReal(&rng) * CV_PI;
        float cos_a = cos(angle), sin_a = sin(angle);
        CvPoint pt1, pt2;
        CvPoint* points = (CvPoint*)malloc( count * sizeof(points[0]));
        CvMat pointMat = cvMat( 1, count, CV_32SC2, points );
        float line[4];
        float d, t;
        
        b = MIN(a*0.3, b);
        
        // generate some points that are close to the line
        for( i = 0; i < count - outliers; i++ ) {
            float x = (cvRandReal(&rng)*2-1)*a;
            float y = (cvRandReal(&rng)*2-1)*b;
            points[i].x = cvRound(x*cos_a - y*sin_a + img->width/2);
            points[i].y = cvRound(x*sin_a + y*cos_a + img->height/2);
        }
        
        // generate "completely off" points
        for( ; i < count; i++ ) {
            points[i].x = cvRandInt(&rng) % img->width;
            points[i].y = cvRandInt(&rng) % img->height;
        }
        
        // find the optimal line
        cvFitLine( &pointMat, CV_DIST_L1, 1, 0.001, 0.001, line );
        cvZero( img );
        
        // draw the points
        for( i = 0; i < count; i++ ) {
            cvCircle( img, points[i], 2, i < count - outliers ? CV_RGB(255, 0, 0) : CV_RGB(255,255,0), CV_FILLED, CV_AA, 0 );
        }
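        
        // cvFitLine fills line[] with (vx, vy, x0, y0): a unit direction vector
        // plus a point on the fitted line; the code below re-normalizes the
        // direction and extends it to two far-apart endpoints for drawing.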
        
        d = sqrt((double)line[0]*line[0] + (double)line[1]*line[1]);
        line[0] /= d;
        line[1] /= d;
        t = (float)(img->width + img->height);
        pt1.x = cvRound(line[2] - line[0]*t);
        pt1.y = cvRound(line[3] - line[1]*t);
        pt2.x = cvRound(line[2] + line[0]*t);
        pt2.y = cvRound(line[3] + line[1]*t);
        cvLine( img, pt1, pt2, CV_RGB(0,255,0), 3, CV_AA, 0 );
        
        cvShowImage( "fitline", img );
        
        key = (char) cvWaitKey(0);
        free( points );   // release the points before a possible exit
        if( key == 27 )  break;
    }
    
    cvDestroyWindow( "fitline" );
    return 0;
}
void faceDbCreator(const char filePath[50],const char coordsFilename[100],
                                      const int startFile,const int endFile,
                                      const int noIterations,const int border){
  /**Number of Feature Points used in aligning images.**/
  const int noFeaturePoints             =   4;

  const int initialSize                 =   38;

  int i,j,k,iteration;

  /**No of files from DB added for alignment**/
  int noFiles                           =   0;
  double xr                             =   0;
  double yr                             =   0;
  int x,y;
  char filePathCopy[100];
  /**Coords of the standard face with respect to initialSize**/
  CvMat *stdCoords                      =   cvCreateMat(noFeaturePoints*2,1,
                                                                  CV_64FC1);
  double stdCoordsData[]                =   {5+border,6+border,32+border,
                                            6+border,18+border,15+border,
                                                    18+border,25+border};
  stdCoords->data.db                    =   stdCoordsData;

  /**Average Coords of the faces aligned so far**/
  double avgData[noFeaturePoints*2];
  CvMat *avgMat                         =   cvCreateMat(noFeaturePoints*2,1,
                                                                    CV_64FC1);
  avgMat->data.db                       =   avgData;
  /**Coords to which other coordinates are aligned to**/
  double testData[noFeaturePoints*2];
  CvMat *testMat                        =   cvCreateMat(noFeaturePoints*2,1,
                                                                    CV_64FC1);
  testMat->data.db                      =   testData;

  cvCopy(stdCoords,testMat);

  double tempCoords[noFeaturePoints*2];

  /**Coords of all the images in the database**/
  CvMat* coords[endFile-startFile+1];

  double coordsData[endFile-startFile+1][noFeaturePoints*8];

  /**Face DB image file names**/
  char fileNames[endFile-startFile+1][100];
  char tempFileName[100];
  char tempStr[50];

  IplImage *img                         =   NULL;
  IplImage *dst                         =   NULL;

  FILE* coordsFile                      =   fopen(coordsFilename,"r+");
  FILE* t                               =   NULL;

  if (coordsFile){
    for (i=-startFile+1;i<=endFile-startFile;++i){
      if(!feof(coordsFile)){
        fscanf(coordsFile,"%s %lf %lf %lf %lf %lf %lf %lf %lf",tempStr,
                                &tempCoords[0],&tempCoords[1],&tempCoords[2],
                                &tempCoords[3],&tempCoords[4],&tempCoords[5],
                                                &tempCoords[6],&tempCoords[7]);
        /**Skip the coords upto startImage**/
        if (i>=0){
          strcpy(tempFileName,filePath);
          strcat(tempFileName,tempStr);
          /**Check whether the file exists**/
          if (t=fopen(tempFileName,"r")){
            fclose(t);
            strcpy(fileNames[noFiles],tempFileName);

            coords[noFiles]             =  cvCreateMat(noFeaturePoints*2,4,
                                                                    CV_64FC1);
            faceDbCreatorFillData(coordsData[noFiles],tempCoords,noFeaturePoints);

            coords[noFiles]->data.db    =   coordsData[noFiles];

            ++noFiles;
          }
        }
      }
      else{
        noFiles                         =   i-1;
        break;
      }
    }
    fclose(coordsFile);

    if (!noFiles){
      printf("Face DB Creator Error: No File To Process\n");
      exit(EXIT_FAILURE);
    }
  }
  else {
    printf("Face DB Creator Error: Could Not Open Coords File\n");
    exit(EXIT_FAILURE);
  }

  /**Pseudoinverse**/
  CvMat *temp2                          =   cvCreateMat(4,1,CV_64FC1);
  double tempData2[4];
  temp2->data.db                        =   tempData2;

  for (iteration=0;iteration<noIterations;++iteration){
    cvSetZero(avgMat);
    for (i=0;i<noFiles;++i){
      pseudoInverse(coords[i],testMat,temp2);
      for (j=0;j<noFeaturePoints;++j){
        xr                              =   coordsData[i][j*8]*temp2->data.db[0]
                                            -coordsData[i][j*8+4]*
                                            temp2->data.db[1]+temp2->data.db[2];

        yr                              =   coordsData[i][j*8]*temp2->data.db[1]
                                            +coordsData[i][j*8+4]*
                                            temp2->data.db[0]+temp2->data.db[3];
        coordsData[i][j*8]              =   xr;
        coordsData[i][j*8+5]            =   xr;
        coordsData[i][j*8+1]            =   -yr;
        coordsData[i][j*8+4]            =   yr;
        avgData[j*2]                    +=  xr;
        avgData[j*2+1]                  +=  yr;
      }

      img                               =   cvLoadImage(fileNames[i],
                                                      CV_LOAD_IMAGE_GRAYSCALE);

      dst                               =   cvCreateImage(cvSize(initialSize+
                                                2*border,initialSize+2*border),
                                                    img->depth,img->nChannels);
      cvSetZero(dst);

      double a                          =   temp2->data.db[0];
      double b                          =   temp2->data.db[1];
      double det                        =   a*a+b*b;
      double tx                         =   temp2->data.db[2];
      double ty                         =   temp2->data.db[3];

      /**Transform the image**/
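      /* The alignment applies the similarity transform x' = a*x - b*y + tx,
         y' = b*x + a*y + ty; each destination pixel (k,j) is therefore mapped
         back through the inverse, dividing by det = a*a + b*b. */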
      for (j=0;j<dst->height;++j){
        for (k=0;k<dst->width;++k){
          xr                            =   ((k-tx)*a+(j-ty)*b)/det;
          yr                            =   ((k-tx)*-b+(j-ty)*a)/det;
          if ((int)xr>=0 && (int)xr <img->width && (int)yr>=0
                                                 && (int)yr<img->height){
            *((unsigned char*)(dst->imageData)+j*dst->widthStep+k)=
                                    *((unsigned char*)(img->imageData)+
                                           (int)yr*img->widthStep+(int)xr);
          }

        }
      }
      cvSaveImage(fileNames[i],dst);
      cvReleaseImage(&img);
      cvReleaseImage(&dst);

    }

    /**Average of the transformations performed so far**/
    for (j=0;j<noFeaturePoints*2;++j){
      avgData[j]                        /=  endFile-startFile+1;
    }
    /**Perform transformation on the average data**/
    CvMat* tempMat                      =   cvCreateMat(noFeaturePoints*2,4,
                                                                      CV_64FC1);
    double tempMatData[noFeaturePoints*8];
    tempMat->data.db                    =   tempMatData;
    faceDbCreatorFillData(tempMatData,avgData,noFeaturePoints);

    pseudoInverse(tempMat,stdCoords,temp2);

    for (j=0;j<noFeaturePoints;++j){
      testData[j*2]                     =   avgData[j*2]*temp2->data.db[0]-
                                            avgData[j*2+1]*temp2->data.db[1]+
                                                             temp2->data.db[2];
      testData[j*2+1]                   =   avgData[j*2]*temp2->data.db[1]+
                                            avgData[j*2+1]*temp2->data.db[0]+
                                                             temp2->data.db[3];
    }
    cvReleaseMat(&tempMat);
  }

  IplImage *img8U,*img64F;
  CvRect *cropArea;

  IplImage *finalImage32F               =   cvCreateImage(cvSize(CROPPED_WIDTH,
                                               CROPPED_HEIGHT),IPL_DEPTH_32F,1);
  IplImage *finalImage8U                =   cvCreateImage(cvSize(CROPPED_WIDTH,
                                                CROPPED_HEIGHT),IPL_DEPTH_8U,1);
  IplImage *transformImage64F;
  IplImage *transformImage32F;
  IplImage *croppedImage32F             =   cvCreateImage(cvSize(initialSize,
                                                  initialSize),IPL_DEPTH_32F,1);
  IplImage *croppedImage64F             =   cvCreateImage(cvSize(initialSize,
                                                  initialSize),IPL_DEPTH_64F,1);

  IplImage* mask                        =   cvCreateImage(cvGetSize
                                              (croppedImage64F),IPL_DEPTH_8U,1);
  maskGenerator(mask);

  /**Random transformations**/
  double scale                          =   0;
  double rotate                         =   0;
  double translateX                     =   0;
  double translateY                     =   0;

  tempStr[0]                            =   '_';
  tempStr[4]                            =   '.';
  tempStr[5]                            =   'j';
  tempStr[6]                            =   'p';
  tempStr[7]                            =   'g';
  tempStr[8]                            =   '\0';

  /**Random Number Generator**/
  CvRNG rg;

  for (i=0;i<noFiles;++i){
    img8U                               =   cvLoadImage(fileNames[i],
                                                       CV_LOAD_IMAGE_GRAYSCALE);
    img64F                              =   cvCreateImage(cvGetSize(img8U),
                                                               IPL_DEPTH_64F,1);
    cvConvertScale(img8U,img64F);
    cvReleaseImage(&img8U);

    remove(fileNames[i]);

    xr                                  =   coordsData[i][0]-stdCoordsData[0]+
                                                                         border;
    yr                                  =   coordsData[i][4]-stdCoordsData[1]+
                                                                         border;
    cvSetImageROI(img64F,cvRect(cvRound(xr),cvRound(yr),initialSize,
                                                            initialSize));
    cvCopy(img64F,croppedImage64F);

    /**Creating variations for each image**/
    for (j=0;j<NO_VARIATIONS;++j){
      lightingCorrection(croppedImage64F,mask);
      rg                                =   cvRNG(time(0)*1000*(i+20)*(j+30));

      cvConvertScale(croppedImage64F,croppedImage32F);
      cvResize(croppedImage32F,finalImage32F);
      cvConvertScale(finalImage32F,finalImage8U);
      tempStr[1]                        =   (j/100)%10+48;
      tempStr[2]                        =   (j/10)%10+48;tempStr[3]=j%10+48;

      strncpy(tempFileName,fileNames[i],strlen(fileNames[i])-4);

      tempFileName[strlen(fileNames[i])-4]
                                        ='\0';
      strcat(tempFileName,tempStr);

      cvSaveImage(tempFileName,finalImage8U);
      switch (cvRandInt(&rg)%3){
        /**Scaling**/
        case 0:
          if (cvRandInt(&rg)%2)
            scale                       =   cvRandReal(&rg)*MAX_SCALE*
                                            initialSize/CROPPED_WIDTH;
          else
            scale                       =   cvRandReal(&rg)*MIN_SCALE*
                                            initialSize/CROPPED_HEIGHT;

          transformImage64F             =   cvCreateImage(
                                            cvSize(cvRound(initialSize-2*scale),
                                            cvRound(initialSize-2*scale)),
                                            IPL_DEPTH_64F,1);

          transformImage32F             =   cvCreateImage(
                                            cvSize(cvRound(initialSize-2*scale),
                                            cvRound(initialSize-2*scale)),
                                            IPL_DEPTH_32F,1);

          cvSetImageROI(img64F,cvRect(cvRound(xr+scale),cvRound(yr+scale),
                    cvRound(initialSize-2*scale),cvRound(initialSize-2*scale)));

          cvCopy(img64F,transformImage64F);
          cvConvertScale(transformImage64F,transformImage32F);

          cvResize(transformImage32F,croppedImage32F);
          cvConvertScale(croppedImage32F,croppedImage64F);
          cvReleaseImage(&transformImage64F);
          cvReleaseImage(&transformImage32F);
          break;
        /**Rotation**/
        case 1:
          if (cvRandInt(&rg)%2)
            rotate                      =   cvRandReal(&rg)*MAX_ROTATE;
          else
            rotate                      =   cvRandReal(&rg)*MIN_ROTATE;

          cvResetImageROI(img64F);

          transformImage64F             =   cvCreateImage(cvGetSize(img64F),
                                                            IPL_DEPTH_64F,1);
          transformRotate(img64F,transformImage64F,
          &cvPoint2D64f(xr+initialSize/2,yr+initialSize/2),rotate*M_PI/180);

          cvSetImageROI(transformImage64F,
                            cvRect(xr,yr,initialSize,initialSize));

          cvCopy(transformImage64F,croppedImage64F);
          cvReleaseImage(&transformImage64F);
          break;
        default:
          /**Translation**/
          if (cvRandInt(&rg)%2){
            if (cvRandInt(&rg)%2){
              translateX                =   cvRandReal(&rg)*MAX_TRANSLATE*
                                                    initialSize/CROPPED_WIDTH;
              translateY                =   cvRandReal(&rg)*MAX_TRANSLATE*
                                                    initialSize/CROPPED_HEIGHT;
            }
            else{
              translateX                =   cvRandReal(&rg)*MIN_TRANSLATE*
                                                    initialSize/CROPPED_WIDTH;
              translateY                =   cvRandReal(&rg)*MIN_TRANSLATE*
                                                    initialSize/CROPPED_HEIGHT;
            }
          }
          else{
            if (cvRandInt(&rg)%2){
              translateX                =   cvRandReal(&rg)*MAX_TRANSLATE*
                                                    initialSize/CROPPED_WIDTH;
              translateY                =   cvRandReal(&rg)*MIN_TRANSLATE*
                                                    initialSize/CROPPED_HEIGHT;
            }
            else{
              translateX                =   cvRandReal(&rg)*MIN_TRANSLATE*
                                                    initialSize/CROPPED_WIDTH;
              translateY                =   cvRandReal(&rg)*MAX_TRANSLATE*
                                                    initialSize/CROPPED_HEIGHT;
            }
          }
          cvSetImageROI(img64F,cvRect(cvRound(xr+translateX),
                              cvRound(yr+translateY),initialSize,initialSize));
          cvCopy(img64F,croppedImage64F);
      }
    }
    cvReleaseImage(&img64F);
    cvReleaseMat(&coords[i]);
  }
  cvReleaseImage(&finalImage8U);
  cvReleaseImage(&finalImage32F);
  cvReleaseImage(&croppedImage32F);
  cvReleaseImage(&croppedImage64F);
  cvReleaseMat(&stdCoords);
  cvReleaseMat(&testMat);
  cvReleaseMat(&avgMat);
  cvReleaseMat(&temp2);
}
static CvStatus
icvFitLine3D( CvPoint3D32f * points, int count, int dist,
              float _param, float reps, float aeps, float *line )
{
    double EPS = count*FLT_EPSILON;
    void (*calc_weights) (float *, int, float *) = 0;
    void (*calc_weights_param) (float *, int, float *, float) = 0;
    float *w;                   /* weights */
    float *r;                   /* square distances */
    int i, j, k;
    float _line[6], _lineprev[6];
    float rdelta = reps != 0 ? reps : 1.0f;
    float adelta = aeps != 0 ? aeps : 0.01f;
    double min_err = DBL_MAX, err = 0;
    CvRNG rng = cvRNG(-1);

    memset( line, 0, 6*sizeof(line[0]) );

    switch (dist)
    {
    case CV_DIST_L2:
        return icvFitLine3D_wods( points, count, 0, line );

    case CV_DIST_L1:
        calc_weights = icvWeightL1;
        break;

    case CV_DIST_L12:
        calc_weights = icvWeightL12;
        break;

    case CV_DIST_FAIR:
        calc_weights_param = icvWeightFair;
        break;

    case CV_DIST_WELSCH:
        calc_weights_param = icvWeightWelsch;
        break;

    case CV_DIST_HUBER:
        calc_weights_param = icvWeightHuber;
        break;

    /*case CV_DIST_USER:
        _PFP.p = param;
        calc_weights = (void ( * )(float *, int, float *)) _PFP.fp;
        break;*/

    default:
        return CV_BADFACTOR_ERR;
    }
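    /* Each selected weight function implements a robust M-estimator: the
       residuals r[] of the current fit are mapped to weights w[], so points
       far from the line contribute less to the next weighted least-squares
       pass. */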

    w = (float *) cvAlloc( count * sizeof( float ));
    r = (float *) cvAlloc( count * sizeof( float ));

    for( k = 0; k < 20; k++ )
    {
        int first = 1;
        for( i = 0; i < count; i++ )
            w[i] = 0.f;
        
        for( i = 0; i < MIN(count,10); )
        {
            j = cvRandInt(&rng) % count;
            if( w[j] < FLT_EPSILON )
            {
                w[j] = 1.f;
                i++;
            }
        }

        icvFitLine3D_wods( points, count, w, _line );
        for( i = 0; i < 30; i++ )
        {
            double sum_w = 0;

            if( first )
            {
                first = 0;
            }
            else
            {
                double t = _line[0] * _lineprev[0] + _line[1] * _lineprev[1] + _line[2] * _lineprev[2];
                t = MAX(t,-1.);
                t = MIN(t,1.);
                if( fabs(acos(t)) < adelta )
                {
                    float x, y, z, ax, ay, az, dx, dy, dz, d;

                    x = _line[3] - _lineprev[3];
                    y = _line[4] - _lineprev[4];
                    z = _line[5] - _lineprev[5];
                    ax = _line[0] - _lineprev[0];
                    ay = _line[1] - _lineprev[1];
                    az = _line[2] - _lineprev[2];
                    dx = (float) fabs( y * az - z * ay );
                    dy = (float) fabs( z * ax - x * az );
                    dz = (float) fabs( x * ay - y * ax );

                    d = dx > dy ? (dx > dz ? dx : dz) : (dy > dz ? dy : dz);
                    if( d < rdelta )
                        break;
                }
            }
            /* calculate distances */
            if( icvCalcDist3D( points, count, _line, r ) < FLT_EPSILON*count )
                break;

            /* calculate weights */
            if( calc_weights )
                calc_weights( r, count, w );
            else
                calc_weights_param( r, count, w, _param );

            for( j = 0; j < count; j++ )
                sum_w += w[j];

            if( fabs(sum_w) > FLT_EPSILON )
            {
                sum_w = 1./sum_w;
                for( j = 0; j < count; j++ )
                    w[j] = (float)(w[j]*sum_w);
            }
            else
            {
                for( j = 0; j < count; j++ )
                    w[j] = 1.f;
            }

            /* save the line parameters */
            memcpy( _lineprev, _line, 6 * sizeof( float ));

            /* Run again... */
            icvFitLine3D_wods( points, count, w, _line );
        }

        if( err < min_err )
        {
            min_err = err;
            memcpy( line, _line, 6 * sizeof(line[0]));
            if( err < EPS )
                break;
        }
    }

    // Return...
    cvFree( &w );
    cvFree( &r );
    return CV_OK;
}
Example #8
int main()
{
	#define MAX_CLUSTER 5
	CvScalar color_tab[MAX_CLUSTER];
	IplImage* img = cvCreateImage(cvSize(500,500) , 8 , 3);
	CvRNG rng = cvRNG(0xffffffff);

	color_tab[0] = CV_RGB(255 ,   0 ,   0);
	color_tab[1] = CV_RGB(  0 , 255 ,   0);
	color_tab[2] = CV_RGB(100 , 100 , 255);
	color_tab[3] = CV_RGB(255 ,   0 , 255);
	color_tab[4] = CV_RGB(255 , 255 ,   0);

	cvNamedWindow("clusters" , 1);

	while(1)
	{
		int cluster_count = cvRandInt(&rng)%MAX_CLUSTER + 1;
		int sample_count = cvRandInt(&rng)%1000 + 1;
		CvMat* points = cvCreateMat(sample_count , 1 , CV_32FC2);
		CvMat* clusters = cvCreateMat(sample_count , 1 ,CV_32SC1);
		int k;
		for (k = 0 ; k<cluster_count ; k++)
		{
			CvPoint center;
			CvMat point_chunk;
			center.x = cvRandInt(&rng)%(img->width);
			center.y = cvRandInt(&rng)%(img->height);

			cvGetRows( points , &point_chunk ,
					   k*sample_count/cluster_count ,
					   (k+1)*sample_count/cluster_count ,
					   1);

			cvRandArr(&rng , &point_chunk , CV_RAND_NORMAL ,
					  cvScalar(center.x , center.y , 0 , 0),
					  cvScalar(img->width/6 , img->height/6 , 0 , 0) );
		}

		int i;
		for (i = 0; i<sample_count/2 ; i++)
		{ // randomly pick two samples and swap them
			CvPoint2D32f* pt1 = (CvPoint2D32f*)points->data.fl + cvRandInt(&rng)%sample_count;
			CvPoint2D32f* pt2 = (CvPoint2D32f*)points->data.fl + cvRandInt(&rng)%sample_count;
			CvPoint2D32f temp;
			CV_SWAP(*pt1 , *pt2 , temp);
		}
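
		// cvKMeans2 partitions the 2-D points into cluster_count groups;
		// clusters->data.i[i] receives the 0-based cluster index of sample i.
		// The criteria stop the iteration after 10 passes or once the centres
		// move less than 1.0.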

		cvKMeans2(points , cluster_count , clusters ,
				  cvTermCriteria(CV_TERMCRIT_EPS+CV_TERMCRIT_ITER,10,1.0),
				  1, 0, 0, 0, 0);

		cvZero(img);
		for (i = 0; i<sample_count ; i++)
		{
			CvPoint2D32f pt = ((CvPoint2D32f*)points->data.fl)[i];
			int cluster_idx = clusters->data.i[i];
			cvCircle(img , cvPointFrom32f(pt), 2, color_tab[cluster_idx] ,
					CV_FILLED, 8, 0);
		}

		cvReleaseMat(&points);
		cvReleaseMat(&clusters);

		cvShowImage("clusters" , img);

		int key = cvWaitKey(0);
		if(key == 27)
				break;
	}//while(1)

	return 0;
}
void BreakingPointsScreen::setup(string _name, ofPoint _position, ofPoint _size, ofxThreadedImageLoader* _loader) {

//    float imageWidth = 1280;
//    float imageHeight = 800;

//    currentImage.allocate(imageWidth,imageHeight, OF_IMAGE_COLOR);
//    nextImage.allocate(imageWidth,imageHeight, OF_IMAGE_COLOR);
//
//    currentWatershed.allocate(imageWidth,imageHeight, OF_IMAGE_GRAYSCALE);
//    nextWatershed.allocate(imageWidth,imageHeight, OF_IMAGE_GRAYSCALE);

    name = _name;

    position = _position;
    size = _size;

    imageDirectory.allowExt("tif");
    int numberOfImages = imageDirectory.listDir("imagery");

    watershedDirectory.allowExt("tif");
    int numberOfWatersheds = watershedDirectory.listDir("watersheds");

    cout << "Number of images " << numberOfImages << endl;
    cout << "Number of watersheds " << numberOfWatersheds << endl;

    loader = _loader;

    if( (numberOfImages > 0) && (numberOfWatersheds > 0) && (numberOfImages == numberOfWatersheds)) { //files exist, and there is a match in count
        string currentImageFileName = randomFileFromDirectory(&imageDirectory);
        string nextImageFileName = randomFileFromDirectory(&imageDirectory);

        loader->loadFromDisk(&current.theImage, "imagery/"+currentImageFileName);
        loader->loadFromDisk(&next.theImage, "imagery/"+nextImageFileName);

        loader->loadFromDisk(&current.theWatershed, "watersheds/"+currentImageFileName);
        loader->loadFromDisk(&next.theWatershed, "watersheds/"+nextImageFileName);

//        loader.startThread(false, false);
    } else {
        cerr << "Error loading images or watersheds, " << numberOfImages << " images found, with " << numberOfWatersheds << " watersheds found. " << endl;
    }

    bWaiting = false;
    timeOfStartOfWait = 0.f;
    waitPeriod = 5.f;

    current.rng = cvRNG(-1);
    next.rng = cvRNG(-1);

    ofxCvColorImage	originalImage;
    originalImage.allocate(1280,800); //naughty using constant value here!

    current.wshed = cvCloneImage( originalImage.getCvImage() );
    current.marker_mask = cvCreateImage( cvGetSize(current.wshed), 8, 1 ); //8 bit depth, 1 channel
    current.markers = cvCreateImage( cvGetSize(current.wshed), IPL_DEPTH_32S, 1 ); // 32-bit signed depth, 1 channel

    next.wshed = cvCloneImage( originalImage.getCvImage() );
    next.marker_mask = cvCreateImage( cvGetSize(next.wshed), 8, 1 );//8 bit depth, 1 channel
    next.markers = cvCreateImage( cvGetSize(next.wshed), IPL_DEPTH_32S, 1 );    // 32-bit signed depth, 1 channel

    current.watershedDone = false;
    next.watershedDone = false;

    timeOfLastFrame = ofGetElapsedTimef();
}
int main( int argc, char *argv[] ) {

    int webcamRun = 0;
    
    CvCapture *capture = 0;
    CvCapture *stereoCapture = 0;
    
    CvCapture **cptPtr = &capture;
    CvCapture **scptPtr = &stereoCapture;
    
    initCaptureFiles( argc, argv, cptPtr, scptPtr, &webcamRun, &enable3D );
  
  
  
  char locationMean[300];
  char locationCov[300];
  strncpy( locationMean, argv[1], 300 );
  strncpy(  locationCov, argv[2], 300 );
  
  
  
  /* check for stereo file */  
  
  
  
  if( !enable3D ) {
    displayStereoFrame   = 0;
    displayPositionFrame = 0;
    depthScaling         = 0;
  }
  
    
  /* Fast-forwards through the video to the action */
  int kl;
  for( kl = 0; kl < 100 && webcamRun == 0; kl++ ) {
    cvQueryFrame( capture );
    if( enable3D ) cvQueryFrame( stereoCapture );
  }
    
    
  /*
      Housekeeping
  */
  
  int maxBox[10];
  // this should be in the relevant file
  //! Create matrix files to hold intermediate calculations for likelihood
  
  
  
  


  /* this should have its own matrix setup function and not be hard-coded */
  //! Create N-dimensional matrices to hold mean and cov data
  int backMeanSizes[] = {_sizeY,_sizeX,4,_numResponses};
  int backCovSizes[]  = {_sizeY,_sizeX,4,_numResponses};

  CvMatND *backMean = cvCreateMatND( 4, backMeanSizes, CV_32F );
  CvMatND *backCov  = cvCreateMatND( 4, backCovSizes,  CV_32F );

  /* Fix this */
  if( webcamRun ) {
    backMean = (CvMatND*) cvLoad( "D:/Will/Dropbox/My Dropbox/Project/Matlab/backMean.xml", NULL, NULL, NULL );
    backCov  = (CvMatND*) cvLoad( "D:/Will/Dropbox/My Dropbox/Project/Matlab/backCov.xml", NULL, NULL, NULL );
  } else {
    backMean = (CvMatND*) cvLoad( locationMean, NULL, NULL, NULL );
    backCov  = (CvMatND*) cvLoad( locationCov, NULL, NULL, NULL );
  }
  
  /* end here */
  
  
  CvMat *imgLikelihood = cvCreateMat( 48, 64, CV_32F );
  


  img  = cvLoadImage( "foreground8/image (1).jpg", CV_LOAD_IMAGE_COLOR );
  yImg = cvLoadImage( "foreground8/image (1).jpg", CV_LOAD_IMAGE_COLOR );
  cvNamedWindow( "Tracker", CV_WINDOW_AUTOSIZE );
  
  
  /* Condensation stuff */
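  /* cvCreateConDensation allocates a Condensation (particle) filter with DP
     state dimensions, MP measurement dimensions and nSamples particles; the
     RandS[] generators initialised below drive its per-dimension diffusion. */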
  ConDens = cvCreateConDensation( DP, MP, nSamples );
  
  bx = 320; by = 240;
	//hm = 0; //vm = 0;

  /* Initialize the random number generator */
	rng_state = cvRNG(0xffffffff);
 
  initializeCondensation();
  
  /* This allows us to change the probability with which Condensation alters each variable */
  cvRandInit( &(ConDens->RandS[0]), -75, 75, 0, CV_RAND_UNI);
  cvRandInit( &(ConDens->RandS[1]),  -5,  5, 1, CV_RAND_UNI);
  cvRandInit( &(ConDens->RandS[2]),  -5,  5, 2, CV_RAND_UNI);
  cvRandInit( &(ConDens->RandS[3]),  -2,  2, 3, CV_RAND_UNI);
  
  cvRandInit( &(ConDens->RandS[4]), -75, 75, 4, CV_RAND_UNI);
  cvRandInit( &(ConDens->RandS[5]),  -5,  5, 5, CV_RAND_UNI);
  cvRandInit( &(ConDens->RandS[6]),  -5,  5, 6, CV_RAND_UNI);
  cvRandInit( &(ConDens->RandS[7]),  -2,  2, 7, CV_RAND_UNI);
  
  /*
    If we have depth scaling, the depth controls the width & height of the box
    So we don't want any randomness
  */
  if( depthScaling ) {
    cvRandInit( &(ConDens->RandS[3]),  0,  0, 3, CV_RAND_UNI);
    cvRandInit( &(ConDens->RandS[7]),  0,  0, 7, CV_RAND_UNI);
  }

  
  IplImage* heatFrame = NULL;
  heatFrame = cvQueryFrame( capture );
  if(enable3D) cvQueryFrame( stereoCapture ); // making sure they stay in sync (and init positionFrame)
  
  
  int frameNumb = 0;
  // int mjk;
  // for( mjk = 0; mjk < 320; mjk++ ) {
    // cvQueryFrame( capture );
    // cvQueryFrame( stereoCapture );
    // frameNumb++;
  // }
  

  
  positionFrame = cvCreateImage(cvSize(640, 510), 8, 3);
  
  
  int trailLength = 20;
  int planX1Pos[20] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0};
  int planX2Pos[20] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0};
  int planZ1Pos[20] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0};
  int planZ2Pos[20] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0};
  

  int key = 0;
  double totalTime = 0;
  int totalFrames = 0;
  
  
  
  while( key != 'q' ) {
  
    // stopCount++;
    // strcpy(frameNameTemp, frameName);
    // itoa(stopCount, frameCount, 10);
    // strcat( frameNameTemp, frameCount);
    // strcat(frameNameTemp, ".jpg");
    // cvSaveImage(frameNameTemp, positionFrame, 0);
    
    
    /* Start timing */
    clock_t start = clock();
        
        frameNumb++;
        printf("Frame %d\n", frameNumb);
        if( frameNumb == 210 ) {
          for( ; frameNumb < 290; frameNumb++ ) {
            cvQueryFrame( capture );
            cvQueryFrame( stereoCapture );
          }
        }
        
        if( frameNumb == 350 ) {
          return(0);
        }
        
        /* Get the first video frame, and maybe stereo frame */
        frame = cvQueryFrame( capture );
        if(enable3D) stereoFrame = cvQueryFrame( stereoCapture );
        
        
        /* Double-check that we haven't reached the end */
        if( !frame ) break;
        if( enable3D ) if( !stereoFrame ) break;
        
        
        /* Compute likelihoods for new frame, using mean and cov */
        likelihood( frame, backMean, backCov, imgLikelihood );
        
        /* Update the Condensation model using new likelihoods */
        updateCondensation( 0, 0, imgLikelihood, maxBox );

        /* Maybe display the Condensation particles */
        if( displayParticles ) 
          drawParticles();

        /* Draw tracking boxes onto the video and stereo video frame */
        drawTrackingBoxes( 0, maxBox, frame, stereoFrame );
        drawTrackingBoxes( 1, maxBox, frame, stereoFrame );

        /* Show the latest video frame (with boxes and particles) */
        cvShowImage( "Tracker", frame );

        /* Maybe show latest stereo depth frame */
        if( displayStereoFrame )
          cvShowImage( "Stereo", stereoFrame );
      
        /* Maybe show latest position map */
        if( displayPositionFrame )
          drawPositionTrail( maxBox, trailLength, 
                             depth, 
                             planX1Pos, planX2Pos, planZ1Pos, 
                             planZ2Pos, positionFrame );
        
        /* Maybe show the heat map of the input image (the likelihood for each patch) */
        if( displayHeatFrame ) {
          drawHeatmap( maxBox, imgLikelihood, heatFrame );
          cvShowImage( "Heat", heatFrame );
        }
        
        /* Update previous x, y positions */
        prevX[0] = maxBox[0]; prevY[0] = maxBox[1];
        prevX[1] = maxBox[4]; prevY[1] = maxBox[5];
        
        
        /* Calculate fps and average fps */
        printf("fps: %3.1f, ", (double)1/(((double)clock() - start) / CLOCKS_PER_SEC));
        totalTime = totalTime + (double)1/(((double)clock() - start) / CLOCKS_PER_SEC);
        totalFrames++;
        printf(" average fps %3.1f", totalTime/totalFrames);
        
        /* Check for key presses */
        key = cvWaitKey( 50 );
        
        /* Enable debugging mode */
        if( key == 'd' ) {
          debug = 1;
          printf("\n*********\n\nDebug: ON\n\n*********\n");
        }
        
        /* Enable debugging mode */
        if( key == 'p' ) {
          if( stateDraw )
            stateDraw = 0;
          else
            stateDraw = 1;
          printf("\n*********\n\nParticle Drawing Toggled\n\n*********\n");
        }

    }
  
  printf("\n\nquitting");

//cvReleaseVideoWriter( &writerTracker );
cvReleaseImage( &img );
cvReleaseImage( &frame );
cvDestroyWindow( "Tracker" );
cvDestroyWindow( "Heat" );
cvDestroyWindow( "Stereo" );
cvDestroyWindow( "Position" );

return(0);

}
Example #11
int main(int argc, char* argv[])
{
    int i, j;
    CvMemStorage* storage = cvCreateMemStorage(0);
    IplImage* img = cvCreateImage( cvSize(w,w), 8, 1 );
    IplImage* img32f = cvCreateImage( cvSize(w,w), IPL_DEPTH_32F, 1 );
    IplImage* img32s = cvCreateImage( cvSize(w,w), IPL_DEPTH_32S, 1 );
    IplImage* img3 = cvCreateImage( cvSize(w,w), 8, 3 );
    (void)argc; (void)argv;

    help();
    cvZero( img );

    for( i=0; i < 6; i++ )
    {
        int dx = (i%2)*250 - 30;
        int dy = (i/2)*150;
        CvScalar white = cvRealScalar(255);
        CvScalar black = cvRealScalar(0);

        if( i == 0 )
        {
            for( j = 0; j <= 10; j++ )
            {
                double angle = (j+5)*CV_PI/21;
                cvLine(img, cvPoint(cvRound(dx+100+j*10-80*cos(angle)),
                    cvRound(dy+100-90*sin(angle))),
                    cvPoint(cvRound(dx+100+j*10-30*cos(angle)),
                    cvRound(dy+100-30*sin(angle))), white, 3, 8, 0);
            }
        }

        cvEllipse( img, cvPoint(dx+150, dy+100), cvSize(100,70), 0, 0, 360, white, -1, 8, 0 );
        cvEllipse( img, cvPoint(dx+115, dy+70), cvSize(30,20), 0, 0, 360, black, -1, 8, 0 );
        cvEllipse( img, cvPoint(dx+185, dy+70), cvSize(30,20), 0, 0, 360, black, -1, 8, 0 );
        cvEllipse( img, cvPoint(dx+115, dy+70), cvSize(15,15), 0, 0, 360, white, -1, 8, 0 );
        cvEllipse( img, cvPoint(dx+185, dy+70), cvSize(15,15), 0, 0, 360, white, -1, 8, 0 );
        cvEllipse( img, cvPoint(dx+115, dy+70), cvSize(5,5), 0, 0, 360, black, -1, 8, 0 );
        cvEllipse( img, cvPoint(dx+185, dy+70), cvSize(5,5), 0, 0, 360, black, -1, 8, 0 );
        cvEllipse( img, cvPoint(dx+150, dy+100), cvSize(10,5), 0, 0, 360, black, -1, 8, 0 );
        cvEllipse( img, cvPoint(dx+150, dy+150), cvSize(40,10), 0, 0, 360, black, -1, 8, 0 );
        cvEllipse( img, cvPoint(dx+27, dy+100), cvSize(20,35), 0, 0, 360, white, -1, 8, 0 );
        cvEllipse( img, cvPoint(dx+273, dy+100), cvSize(20,35), 0, 0, 360, white, -1, 8, 0 );
    }

    cvNamedWindow( "image", 1 );
    cvShowImage( "image", img );
    cvConvert( img, img32f );
    findCComp( img32f );
    cvConvert( img32f, img32s );

    cvFindContours( img32s, storage, &contours, sizeof(CvContour),
                    CV_RETR_CCOMP, CV_CHAIN_APPROX_SIMPLE, cvPoint(0,0) );

    //cvFindContours( img, storage, &contours, sizeof(CvContour),
    //                CV_RETR_TREE, CV_CHAIN_APPROX_SIMPLE, cvPoint(0,0) );


    {
    const char* attrs[] = {"recursive", "1", 0};
    cvSave("contours.xml", contours, 0, 0, cvAttrList(attrs, 0));
    contours = (CvSeq*)cvLoad("contours.xml", storage, 0, 0);
    }

    // comment this out if you do not want approximation
    contours = cvApproxPoly( contours, sizeof(CvContour), storage, CV_POLY_APPROX_DP, 3, 1 );

    cvNamedWindow( "contours", 1 );
    cvCreateTrackbar( "levels+3", "contours", &levels, 7, on_trackbar );

    {
        CvRNG rng = cvRNG(-1);

        CvSeq* tcontours = contours;
        cvCvtColor( img, img3, CV_GRAY2BGR );
        while( tcontours->h_next )
            tcontours = tcontours->h_next;

        for( ; tcontours != 0; tcontours = tcontours->h_prev )
        {
            CvScalar color;
            color.val[0] = cvRandInt(&rng) % 256;
            color.val[1] = cvRandInt(&rng) % 256;
            color.val[2] = cvRandInt(&rng) % 256;
            color.val[3] = cvRandInt(&rng) % 256;
            cvDrawContours(img3, tcontours, color, color, 0, -1, 8, cvPoint(0,0));
            if( tcontours->v_next )
            {
                color.val[0] = cvRandInt(&rng) % 256;
                color.val[1] = cvRandInt(&rng) % 256;
                color.val[2] = cvRandInt(&rng) % 256;
                color.val[3] = cvRandInt(&rng) % 256;
                cvDrawContours(img3, tcontours->v_next, color, color, 1, -1, 8, cvPoint(0,0));
            }
        }

    }

    cvShowImage( "colored", img3 );
    on_trackbar(0);
    cvWaitKey(0);
    cvReleaseMemStorage( &storage );
    cvReleaseImage( &img );
    cvReleaseImage( &img32f );
    cvReleaseImage( &img32s );
    cvReleaseImage( &img3 );

    return 0;
}
Example #12
int main( int argc, char** argv )
{

  IplImage* img;  			 		// input image object 
  IplImage* grayImg = NULL;  		// tmp image object
  IplImage* thresholdedImg = NULL;  // threshold output image object
  IplImage* dst;					// output connected components
	
  int windowSize = 3; // starting neighbourhood size (N)
  int constant = 0; // starting constant value
  CvCapture* capture = NULL; // capture object
	
  const char* windowName1 = "OPENCV: adaptive image thresholding"; // window name
  const char* windowName2 = "OPENCV: grayscale image"; // window name
  const char* windowName3 = "OPENCV: adaptive threshold image"; // window name

	
  bool keepProcessing = true;	// loop control flag
  char key;						// user input	
  int  EVENT_LOOP_DELAY = 40;	// delay for GUI window
                                // 40 ms equates to 1000ms/25fps = 40ms per frame	
	
	
	
  // if command line arguments are provided try to read image/video_name
  // otherwise default to capture from attached H/W camera 

    if( 
	  ( argc == 2 && (img = cvLoadImage( argv[1], 1)) != 0 ) ||
	  ( argc == 2 && (capture = cvCreateFileCapture( argv[1] )) != 0 ) || 
	  ( argc != 2 && (capture = cvCreateCameraCapture( CAMERA_INDEX )) != 0 )
	  )
    {
      // create window objects

      cvNamedWindow(windowName1, 0 );
      cvNamedWindow(windowName2, 0 );
      cvNamedWindow(windowName3, 0 );		

	  // add adjustable trackbar for threshold parameter
		
      cvCreateTrackbar("Neighbourhood (N)", windowName3, &windowSize, 255, NULL);		
	  cvCreateTrackbar("Constant (C)", windowName3, &constant, 50, NULL);
		
	  // if capture object in use (i.e. video/camera)
	  // get initial image from capture object
			
	  if (capture) {
	
		  // cvQueryFrame is just a combination of cvGrabFrame 
		  // and cvRetrieveFrame in one call.
			  
		  img = cvQueryFrame(capture);
		  if(!img){
			if (argc == 2){				  
				printf("End of video file reached\n");
			} else {
				printf("ERROR: cannot get next fram from camera\n");
			}
			exit(0);
		  }
			  
	  }
	  
	  // create output image
	  
	  thresholdedImg = cvCreateImage(cvSize(img->width,img->height), 
						   img->depth, 1);
	  thresholdedImg->origin = img->origin;
	  grayImg = cvCreateImage(cvSize(img->width,img->height), 
				img->depth, 1);
	  grayImg->origin = img->origin;

	  dst = cvCloneImage(img);
	  
	  // create a set of random labels
	  	  
	  CvRNG rng = cvRNG(-1);
	  CvMat* color_tab = cvCreateMat( 1, 255, CV_8UC3 );
            for(int i = 0; i < 255; i++ )
            {
                uchar* ptr = color_tab->data.ptr + i*3;
                ptr[0] = (uchar)(cvRandInt(&rng)%180 + 50);
                ptr[1] = (uchar)(cvRandInt(&rng)%180 + 50);
                ptr[2] = (uchar)(cvRandInt(&rng)%180 + 50);
            }
	  CvMemStorage* storage = cvCreateMemStorage(0);
			
      CvSeq* contours = 0;
	  CvSeq* current_contour;
      int comp_count = 0;
	  
	  // start main loop	
		
	  while (keepProcessing) {
		
		  // if capture object in use (i.e. video/camera)
		  // get image from capture object
			
		  if (capture) {
	
			  // cvQueryFrame is just a combination of cvGrabFrame 
			  // and cvRetrieveFrame in one call.
			  
			  img = cvQueryFrame(capture);
			  if(!img){
				if (argc == 2){				  
					printf("End of video file reached\n");
				} else {
					printf("ERROR: cannot get next fram from camera\n");
				}
				exit(0);
			  }
			  
		  }	  
		  
		  // if input is not already grayscale, convert to grayscale
		
		  if (img->nChannels > 1){
			 cvCvtColor(img, grayImg, CV_BGR2GRAY);
	      } else {
			grayImg = img;
		  }
	  
		  // display image in window

		  cvShowImage( windowName2, grayImg );
	
		  // ensure that the window size is always odd and >= 3

		  if ((windowSize > 3) && (fmod((double) windowSize, 2) == 0)) {
				windowSize++;
		  } else if (windowSize < 3) {
			  windowSize = 3;
		  }
		  
		  // threshold the image and display
		  
		  cvAdaptiveThreshold(grayImg, thresholdedImg, 255,
		  						CV_ADAPTIVE_THRESH_MEAN_C, CV_THRESH_BINARY, 
		  						windowSize, constant);
		  cvShowImage( windowName3, thresholdedImg );
		  
		 // find the contours	
		  
		  cvFindContours( thresholdedImg, storage, 
		  		&contours, sizeof(CvContour), CV_RETR_CCOMP, CV_CHAIN_APPROX_SIMPLE );
		  
		  // if (contours) (cvSeqSort(contours, sort_contour, NULL));
		  
		  // draw the contours in the output image
			
		    cvZero(dst);
			current_contour = contours;
			comp_count = 0;
            for( ; current_contour != 0; current_contour = current_contour->h_next, comp_count++ )
            {
				uchar* ptr = color_tab->data.ptr + (comp_count)*3;
				CvScalar color = CV_RGB( ptr[0], ptr[1], ptr[2] );
                cvDrawContours( dst, current_contour, color,
                                color, -1, CV_FILLED, 8, cvPoint(0,0) );
            }
			if (contours != NULL){
				cvClearSeq(contours);
			}

		  
		  // display images in window

		  cvShowImage( windowName1, dst );
			
		  // start event processing loop (very important,in fact essential for GUI)
	      // 40 ms roughly equates to 1000ms/25fps = 40ms per frame
		  
		  key = cvWaitKey(EVENT_LOOP_DELAY);

		  if (key == 'x'){
			
	   		// if user presses "x" then exit
			
	   			printf("Keyboard exit requested : exiting now - bye!\n");	
	   			keepProcessing = false;
		  } 
	  }
      
      // destroy window objects
      // (triggered by event loop *only* window is closed)

      cvDestroyAllWindows();

      // destroy image object (if it does not originate from a capture object)

      if (!capture){
		  cvReleaseImage( &img );
      }
	  
	  // destroy image objects

      cvReleaseImage( &grayImg );
	  cvReleaseImage( &thresholdedImg );
	  cvReleaseImage( &dst );

      // all OK : main returns 0

      return 0;
    }

    // not OK : main returns -1

    return -1;
}
Example #13
/*
    Function:   contoursample
    Purpose:    contour sampling
    Parameters: seq -------- contour point sequence
                samplearry - receives the sampled points
                samplenum -- number of points to sample
*/
int contoursample(CvSeq * seq , CvPoint *samplearry, int samplenum)
{
    int num = 0 ; 
    for (CvSeq *s = seq ; s !=NULL;s=s->h_next)
        num +=s->total;

	if ( num < samplenum)
	{
		return 0; 
	}

    CvPoint *pointarray = (CvPoint *)malloc(num * sizeof(CvPoint));
    
    int accum = 0 ; 
    for (CvSeq *s =seq ; s!=NULL;s=s->h_next)
    {
        cvCvtSeqToArray( s, pointarray +accum);
        accum +=s->total;
        
    }
    
 
    // randomly shuffle the contour points
    CvRNG rng; 
    rng = cvRNG(cvGetTickCount());
    CvPoint pointtemp;
    int tagtemp = -1;
    for (int i = 0 ; i < num ; ++i)
    {
        int index = cvRandInt(&rng)%(num-i)+i;
        if(index !=i)
        {
            pointtemp = pointarray[index];
            pointarray[index] = pointarray[i];
            pointarray[i] = pointtemp;
            
        }
    }
    // if num > 3*samplenum, i.e. the contour has far more points than needed,
    // randomly keep only 3*samplenum of them to save computation time
    if (num > 3 * samplenum)
    {
        
        CvPoint *pointarray2 = (CvPoint *)malloc(3*samplenum * sizeof(CvPoint));
        for (int i = 0;i < 3*samplenum;++i)
        {
            pointarray2[i] = pointarray[i];
        }
        free(pointarray);
        pointarray = pointarray2;
        num = 3 * samplenum;
    }
    // compute the squared distance between every pair of contour points
	int beg = 0,nelement = 0;
	pairDistance* pd = (pairDistance*)malloc(sizeof(pairDistance)*((num-1)*num/2));
    for (int i = 0 ; i < num ; i++)
    {
        for (int j = i +1 ; j < num ; ++j)
        {
			pd[nelement].i = i;
			pd[nelement].j = j;
			pd[nelement++].distance = (pointarray[i].x -pointarray[j].x) * (pointarray[i].x -pointarray[j].x) + 
				(pointarray[i].y -pointarray[j].y) * (pointarray[i].y -pointarray[j].y);
        
        }
    }
	// sort the point pairs by distance
	quick_sort(pd,0,nelement-1);
    // remove one point of the closest remaining pair until only samplenum points are left
    int nneedremove = num - samplenum;
    int *mask = (int *)malloc( num * sizeof(int));
    memset(mask,0,num * sizeof(int));
    //list<pairDistance>::iterator iter = list_pair.begin();
    //list<pairDistance>::iterator iter = list_pair.begin();
    while (nneedremove > 0)
    {
        int index0 = pd[beg].i;
        int index1 = pd[beg].j;
        if (mask[index0] == 0 && mask[index1] ==0)
        {
            mask[index1] = 1 ;
            nneedremove --;
        }
        beg++;
    }
    // copy the sampled points into samplearry
    int nstartindex = 0 ;
    for (int i = 0 ; i < num ; ++i)
    {
        if (mask[i] ==0)
        {
            samplearry[nstartindex] = pointarray[i];
            nstartindex++;
        }
    }

    free(pointarray);
	free(pd);
	return 1;
}
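
A hypothetical usage sketch (binaryImg is assumed to be an existing single-channel binary IplImage; note that cvFindContours modifies it in place):

	CvMemStorage* storage = cvCreateMemStorage(0);
	CvSeq* contours = 0;
	cvFindContours(binaryImg, storage, &contours, sizeof(CvContour),
	               CV_RETR_LIST, CV_CHAIN_APPROX_NONE, cvPoint(0,0));
	CvPoint samples[100];
	if (contoursample(contours, samples, 100))
	{
	    /* samples[] now holds 100 randomly thinned contour points */
	}
	cvReleaseMemStorage(&storage);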
	/*ExpandRectDifSize(src,FaceRect,FaceRect.width*0.1,FaceRect.height*0.1);
	Mat faceImg = src(FaceRect);*/
	smokeRect =DetectSmoke(faceImg);
	for(int i=0;i<smokeRect.size();i++)
	{
		smokeRect[i].x+=FaceRect.x;
		smokeRect[i].y+=FaceRect.y;
	}
	return smokeRect;

}




Rect GetSmokeResult(Mat &src,Rect PedestrianRect,vector<Rect> helmetRectFirst)
{
	CvRNG rng;
	rng= cvRNG(cvGetTickCount());
	
	Rect HogHeadRect;
	vector<Rect> smokeRect;

	for(int j=0;j<helmetRectFirst.size();j++)
		{
			if(helmetRectFirst[j].width<10||helmetRectFirst[j].height<10)
			{
				continue;
			}else{
				HogHeadRect =GetHeadRect(helmetRectFirst[j]);
				vector<Rect> faces=GetFaceRect(src,HogHeadRect);
				
				for(int k=0;k<faces.size();k++)
				{
				//	smokeRect = GetSmokeRect(src,faces[k]);
					// detect the face and recognize the name
					ExpandRectDifSize(src,faces[k],0,0);
					Mat faceImg = src(faces[k]);
					smokeRect = GetSmokeRect(faceImg,faces[k]);
					
					
					if(smokeRect.size()>0)
					{						
						rectangle(src,PedestrianRect,CV_RGB(255,0,0),3);
						string smokestr ="someone is smoking";
				//		putText(src, smokestr, Point(faces[k].x+faces[k].width,faces[k].y+faces[k].height), FONT_HERSHEY_COMPLEX, 1, Scalar(0, 0, 255), 3);
				//		imwrite("D://Error//"+to_string((long double)rng)+"smokeError.jpg",src);
						return PedestrianRect;
					}
				}
			}
	}
	return Rect(0,0,0,0);

}




//************************************
//* Input:  five Mat frames; the first and the last frame are about 50 frames apart
//* Output: positions of people without a helmet:          noHelmetPedestrianRect
//*         positions of people without a safety belt:     noBeltPedestrianRect
//*         positions of people who are smoking:           smokePedestrianRect
//*         whether a vest-wearing supervisor is present:  ClassDetectionResult (true: present, no alarm; false: absent, raise an alarm)
//*************************************/
void DetectConstructionSite(vector<Mat> orgMat,vector<Rect> &noHelmetPedestrianRect,vector<Rect> &smokePedestrianRect,bool &ClassDetectionResult,vector<Rect> &noBeltPedestrianRect)
{

	CvRNG rng;
	rng= cvRNG(cvGetTickCount());
	vector<Rect> PedestrianRect;
	vector<Rect> helmetRect;
	vector<Rect> smokeRect;
	vector<Rect> helmetBeltRect;
	vector<Rect> supervisorRectPerMat; 
	Rect HogHeadRect;
	Mat src = orgMat[0];

	PedestrianRect = GetCoilRegionPeople(src);
	for(int i=0;i<PedestrianRect.size();i++)
	{
		HogHeadRect=GetHeadRect(PedestrianRect[i]);
		helmetRect=GetHelmetRect(src,HogHeadRect);
		vector<Rect> faces=GetFaceRect(src,HogHeadRect);
		//a missing safety belt was detected
		helmetBeltRect = DetectHelmetBelt(faces,src);
		if(helmetBeltRect.size()>0)
		{
			noBeltPedestrianRect.push_back(PedestrianRect[i]);
		}
		for(int j=0;j<faces.size();j++)
		{
			//smoking detected
			smokeRect = GetSmokeRect(src,faces[j]);
			if(smokeRect.size()>0)
			{
				smokePedestrianRect.push_back(PedestrianRect[i]);
			}
		}
		//no helmet detected
		if(helmetRect.size()==0)
		{
			noHelmetPedestrianRect.push_back(PedestrianRect[i]);
		}

		//check whether a supervisor vest is worn
		int isOn = DetectSupervisorClass(PedestrianRect[i],src);
		if(isOn==1)
		{
			supervisorRectPerMat.push_back(PedestrianRect[i]);
		}
	}

	vector<Rect> supervisorPedRect;
	for(int k =1;k<orgMat.size();k++)
	{
		supervisorPedRect = GetCoilRegionPeople(orgMat[k]);
		for(int s =0;s<supervisorPedRect.size();s++)
		{
			int isOn1 = DetectSupervisorClass(supervisorPedRect[s],orgMat[k]);//known bug spot
			if(isOn1==1)
			{
				for(int w =0;w<supervisorRectPerMat.size();w++)
				{
					if(abs(supervisorRectPerMat[w].x-supervisorPedRect[s].x)>20&&abs(supervisorRectPerMat[w].y-supervisorPedRect[s].y)>20)
					{
						supervisorRectPerMat.push_back(supervisorPedRect[s]);
					}
				
				}
			}
		}
	}

	if(supervisorRectPerMat.size()>=1)
	{
		ClassDetectionResult = true;
	}
	else
	{
		ClassDetectionResult = false;
	
	}
	
}
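
A hypothetical caller sketch (f0..f4 are placeholder Mat frames assumed to have been captured roughly 50 frames apart, as the comment block above requires):

	vector<Mat> frames;
	frames.push_back(f0); frames.push_back(f1); frames.push_back(f2);
	frames.push_back(f3); frames.push_back(f4);
	vector<Rect> noHelmet, smoking, noBelt;
	bool vestPresent = false;
	DetectConstructionSite(frames, noHelmet, smoking, vestPresent, noBelt);
	if(!vestPresent)
	{
	    /* raise the "supervisor missing" alarm */
	}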
Example #15
void CV_SolvePolyTest::run( int )
{
    CvRNG rng = cvRNG();
    int fig = 100;
    double range = 50;
    double err_eps = 1e-4;

    for (int idx = 0, max_idx = 1000, progress = 0; idx < max_idx; ++idx)
    {
        progress = update_progress(progress, idx-1, max_idx, 0);
        int n = cvRandInt(&rng) % 13 + 1;
        std::vector<complex_type> r(n), ar(n), c(n + 1, 0);
        std::vector<double> a(n + 1), u(n * 2), ar1(n), ar2(n);

        int rr_odds = 3; // odds that we get a real root
        for (int j = 0; j < n;)
        {
            if (cvRandInt(&rng) % rr_odds == 0 || j == n - 1)
	            r[j++] = cvRandReal(&rng) * range;
            else
            {
	            r[j] = complex_type(cvRandReal(&rng) * range,
			    cvRandReal(&rng) * range + 1);
	            r[j + 1] = std::conj(r[j]);
	            j += 2;
            }
        }

        for (int j = 0, k = 1 << n, jj, kk; j < k; ++j)
        {
            int p = 0;
            complex_type v(1);
            for (jj = 0, kk = 1; jj < n && !(j & kk); ++jj, ++p, kk <<= 1)
                ;
            for (; jj < n; ++jj, kk <<= 1)
            {
	            if (j & kk)
	                v *= -r[jj];
	            else
	                ++p;
            }
            c[p] += v;
        }

        bool pass = false;
        double div = 0, s = 0;
        int cubic_case = idx & 1;
        for (int maxiter = 100; !pass && maxiter < 10000; maxiter *= 2, cubic_case = (cubic_case + 1) % 2)
        {
            for (int j = 0; j < n + 1; ++j)
	            a[j] = c[j].real();

            CvMat amat, umat;
            cvInitMatHeader(&amat, n + 1, 1, CV_64FC1, &a[0]);
            cvInitMatHeader(&umat, n, 1, CV_64FC2, &u[0]);
            cvSolvePoly(&amat, &umat, maxiter, fig);

            for (int j = 0; j < n; ++j)
	            ar[j] = complex_type(u[j * 2], u[j * 2 + 1]);

            sort(r.begin(), r.end(), pred_complex());
            sort(ar.begin(), ar.end(), pred_complex());

            pass = true;
            if( n == 3 )
            {
                ar2.resize(n);
                cv::Mat _umat2(3, 1, CV_64F, &ar2[0]), umat2 = _umat2;
                cvFlip(&amat, &amat, 0);
                int nr2;
                if( cubic_case == 0 )
                    nr2 = cv::solveCubic(cv::Mat(&amat),umat2);
                else
                    nr2 = cv::solveCubic(cv::Mat_<float>(cv::Mat(&amat)), umat2);
                cvFlip(&amat, &amat, 0);
                if(nr2 > 0)
                    sort(ar2.begin(), ar2.begin()+nr2, pred_double());
                ar2.resize(nr2);

                int nr1 = 0;
                for(int j = 0; j < n; j++)
                    if( fabs(r[j].imag()) < DBL_EPSILON )
                        ar1[nr1++] = r[j].real();

                pass = pass && nr1 == nr2;
                if( nr2 > 0 )
                {
                    div = s = 0;
                    for(int j = 0; j < nr1; j++)
                    {
                        s += fabs(ar1[j]);
                        div += fabs(ar1[j] - ar2[j]);
                    }
                    div /= s;
                    pass = pass && div < err_eps;
                }
            }

            div = s = 0;
            for (int j = 0; j < n; ++j)
            {
                s += fabs(r[j].real()) + fabs(r[j].imag());
                div += sqrt(pow(r[j].real() - ar[j].real(), 2) + pow(r[j].imag() - ar[j].imag(), 2));
            }
            div /= s;
            pass = pass && div < err_eps;
        }

        if (!pass)
        {
            ts->set_failed_test_info(CvTS::FAIL_INVALID_OUTPUT);
            ts->printf( CvTS::LOG, "too big diff = %g\n", div );

            for (size_t j=0;j<ar2.size();++j)
                ts->printf( CvTS::LOG, "ar2[%d]=%g\n", (int)j, ar2[j]);
            ts->printf(CvTS::LOG, "\n");

            for (size_t j=0;j<r.size();++j)
	            ts->printf( CvTS::LOG, "r[%d]=(%g, %g)\n", (int)j, r[j].real(), r[j].imag());
            ts->printf( CvTS::LOG, "\n" );
            for (size_t j=0;j<ar.size();++j)
	            ts->printf( CvTS::LOG, "ar[%d]=(%g, %g)\n", (int)j, ar[j].real(), ar[j].imag());
            break;
        }
    }
}
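/* A minimal, self-contained sketch of the cvSolvePoly call exercised by the test
   above. The coefficient ordering (constant term first) and the CV_64FC2 root layout
   follow the test code; the concrete polynomial x^2 - 3x + 2 (roots 1 and 2) is an
   illustrative assumption, not part of the original example. */
#include <opencv/cv.h>
#include <stdio.h>

static void solve_poly_demo( void )
{
    double coeffs[] = { 2.0, -3.0, 1.0 };   // 2 - 3x + x^2
    double roots[2 * 2];                    // n = 2 complex roots, stored as (re, im) pairs
    CvMat amat = cvMat( 3, 1, CV_64FC1, coeffs );
    CvMat umat = cvMat( 2, 1, CV_64FC2, roots );

    cvSolvePoly( &amat, &umat, 100, 100 );  // maxiter = 100, fig = 100, as in the test

    for( int j = 0; j < 2; j++ )
        printf( "root %d = %g%+gi\n", j, roots[2 * j], roots[2 * j + 1] );
}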
Beispiel #16
0
void randinitalize( const int init ){
	rng_state = cvRNG(init);
}
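/* A hedged sketch of how the globally seeded rng_state above is typically consumed
   afterwards. The helper name draw_signed_unit() and the coin-flip / [0,1) idioms are
   illustrative assumptions, not part of the original example. */
#include <opencv/cxcore.h>

static CvRNG rng_state;                                   // the global seeded below

static void randinitalize( const int init ){
	rng_state = cvRNG(init);
}

static double draw_signed_unit( void )
{
    int sign = ( cvRandInt( &rng_state ) & 1 ) ? 1 : -1;  // uniform coin flip
    return sign * cvRandReal( &rng_state );               // magnitude uniform in [0,1)
}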
Beispiel #17
0
static void
icvDFSInitSpillTreeNode( const CvSpillTree* tr,
			 const int d,
			 CvSpillTreeNode* node )
{
  if ( node->cc <= tr->naive )
    {
      // already reached a leaf, so terminate the recursion.
      node->leaf = true;
      node->spill = false;
      return;
    }

  // randomly select a node, then find the node farthest from it, then the node farthest from that one,
  // to approximate the farthest node pair
  static CvRNG rng_state = cvRNG(0xdeadbeef);
  int rn = cvRandInt( &rng_state ) % node->cc;
  CvSpillTreeNode* lnode = NULL;
  CvSpillTreeNode* rnode = node->lc;
  for ( int i = 0; i < rn; i++ )
    rnode = rnode->rc;
  lnode = icvFarthestNode( rnode, node->lc, node->cc );
  rnode = icvFarthestNode( lnode, node->lc, node->cc );

  // u is the projection vector
  node->u = cvCreateMat( 1, d, tr->type );
  cvSub( lnode->center, rnode->center, node->u );
  cvNormalize( node->u, node->u );
	
  // find the center of node in hyperspace
  node->center = cvCreateMat( 1, d, tr->type );
  cvZero( node->center );
  CvSpillTreeNode* it = node->lc;
  for ( int i = 0; i < node->cc; i++ )
    {
      cvAdd( it->center, node->center, node->center );
      it = it->rc;
    }
  cvConvertScale( node->center, node->center, 1./node->cc );

  // project every node to "u", and find the mean point "mp"
  it = node->lc;
  node->r = -1.;
  node->mp = 0;
  for ( int i = 0; i < node->cc; i++ )
    {
      node->mp += ( it->p = cvDotProduct( it->center, node->u ) );
      double norm = cvNorm( node->center, it->center );
      if ( norm > node->r )
	node->r = norm;
      it = it->rc;
    }
  node->mp = node->mp / node->cc;

  // overlapping buffer and upper bound, lower bound
  double ob = (lnode->p-rnode->p)*tr->tau*.5;
  node->ub = node->mp+ob;
  node->lb = node->mp-ob;
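  // (points whose projection onto u falls inside [lb, ub] are later duplicated
  //  into both children when this node becomes a spill node)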
  int sl = 0, l = 0;
  int sr = 0, r = 0;
  it = node->lc;
  for ( int i = 0; i < node->cc; i++ )
    {
      if ( it->p <= node->ub )
	sl++;
      if ( it->p >= node->lb )
	sr++;
      if ( it->p < node->mp )
	l++;
      else
	r++;
      it = it->rc;
    }
  // numerical precision problem; keep this node as a leaf and return.
  if (( l == 0 )||( r == 0 ))
    {
      cvReleaseMat( &(node->u) );
      cvReleaseMat( &(node->center) );
      node->leaf = true;
      node->spill = false;
      return;
    }
  CvSpillTreeNode* lc = (CvSpillTreeNode*)cvAlloc( sizeof(CvSpillTreeNode) );
  memset(lc, 0, sizeof(CvSpillTreeNode));
  CvSpillTreeNode* rc = (CvSpillTreeNode*)cvAlloc( sizeof(CvSpillTreeNode) );
  memset(rc, 0, sizeof(CvSpillTreeNode));
  lc->lc = lc->rc = rc->lc = rc->rc = NULL;
  lc->cc = rc->cc = 0;
  int undo = cvRound(node->cc*tr->rho);
  if (( sl >= undo )||( sr >= undo ))
    {
      // it is not a spill point (defeatist search disabled)
      it = node->lc;
      for ( int i = 0; i < node->cc; i++ )
	{
	  CvSpillTreeNode* next = it->rc;
	  if ( it->p < node->mp )
	    icvAppendSpillTreeNode( lc, it );
	  else
	    icvAppendSpillTreeNode( rc, it );
	  it = next;
	}
      node->spill = false;
    } else {
      // a spill point
      it = node->lc;
      for ( int i = 0; i < node->cc; i++ )
	{
	  CvSpillTreeNode* next = it->rc;
	  if ( it->p < node->lb )
	    icvAppendSpillTreeNode( lc, it );
	  else if ( it->p > node->ub )
	    icvAppendSpillTreeNode( rc, it );
	  else {
	    CvSpillTreeNode* cit = icvCloneSpillTreeNode( it );
	    icvAppendSpillTreeNode( lc, it );
	    icvAppendSpillTreeNode( rc, cit );
	  }
	  it = next;
	}
      node->spill = true;
    }
  node->lc = lc;
  node->rc = rc;

  // recursion process
  icvDFSInitSpillTreeNode( tr, d, node->lc );
  icvDFSInitSpillTreeNode( tr, d, node->rc );
}
Beispiel #18
0
#define MODE_POLYGON  4     // draw a polygon

// current drawing mode
int g_draw_mode = 0;

typedef struct MyPoints
{
	CvPoint point;
	struct MyPoints* next;
} myPoints;

IplImage * image;
IplImage * temp;

// random number generator
CvRNG rng = cvRNG(cvGetTickCount()); 

// whether a line is currently being drawn
bool drawing_line = false;
// line drawing: start point and end point
CvPoint g_Line_startpoint = cvPoint(-1, -1);
CvPoint g_Line_endpoint = cvPoint(-1, -1);

// whether a circle is currently being drawn
bool drawing_circle = false;
// circle drawing: center and radius
CvPoint g_Circle_center = cvPoint(-1, -1);
int g_Circle_radius = -1;

// whether an ellipse is currently being drawn
bool drawing_ellipse = false;
Beispiel #19
0
int main(int argc, char** argv)
{
    const float A[] = { 1, 1, 0, 1 };// state transition matrix
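    // (with dt = 1 this is the constant-velocity model:
    //  phi_{k+1} = phi_k + delta_phi_k,  delta_phi_{k+1} = delta_phi_k)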
    
    IplImage* img = cvCreateImage( cvSize(500,500), 8, 3 );// image used for display
    CvKalman* kalman = cvCreateKalman( 2, 1, 0 );// create the CvKalman structure: 2-D state vector, 1-D measurement vector, no control input
    CvMat* state = cvCreateMat( 2, 1, CV_32FC1 ); // (phi, delta_phi) is the state vector
    CvMat* process_noise = cvCreateMat( 2, 1, CV_32FC1 );// 2x1 single-channel CV_32FC1 matrix for the process noise
    CvMat* measurement = cvCreateMat( 1, 1, CV_32FC1 ); // the measurement variable
    CvRNG rng = cvRNG(-1);// initialize the random number generator
    char code = -1;

    cvZero( measurement );// zero the measurement matrix
    cvNamedWindow( "Kalman", 1 );

    for(;;)
    {   // fill the state array with normally distributed random numbers
        cvRandArr( &rng, state, CV_RAND_NORMAL, cvRealScalar(0), cvRealScalar(0.1) );// random initial state
        memcpy( kalman->transition_matrix->data.fl, A, sizeof(A));// initialize the state transition matrix F
        
        // cvSetIdentity(): set all off-diagonal elements to 0 and all diagonal elements to the given scalar
        // the posterior state (before the first prediction step) is initialized to a random value below
        cvSetIdentity( kalman->measurement_matrix, cvRealScalar(1) );// measurement matrix H
        cvSetIdentity( kalman->process_noise_cov, cvRealScalar(1e-5) );// process noise covariance Q
        cvSetIdentity( kalman->measurement_noise_cov, cvRealScalar(1e-1) );// measurement noise covariance R
        cvSetIdentity( kalman->error_cov_post, cvRealScalar(1));// posterior error covariance
        cvRandArr( &rng, kalman->state_post, CV_RAND_NORMAL, cvRealScalar(0), cvRealScalar(0.1) );// random initial posterior state
        
        // start predicting on the dynamic system
        
        for(;;)
        {
            #define calc_point(angle)                                      \
                cvPoint( cvRound(img->width/2 + img->width/3*cos(angle)),  \
                         cvRound(img->height/2 - img->width/3*sin(angle))) 

            float state_angle = state->data.fl[0];
            CvPoint state_pt = calc_point(state_angle);
            
            const CvMat* prediction = cvKalmanPredict( kalman, 0 );// predict the state at the next time step (control input is 0)
            float predict_angle = prediction->data.fl[0];
            CvPoint predict_pt = calc_point(predict_angle);
            
            float measurement_angle;
            CvPoint measurement_pt;

            cvRandArr( &rng, measurement, CV_RAND_NORMAL, cvRealScalar(0),
                       cvRealScalar(sqrt(kalman->measurement_noise_cov->data.fl[0])) );

            /* generate measurement */
            cvMatMulAdd( kalman->measurement_matrix, state, measurement, measurement );
            // cvMatMulAdd(src1,src2,src3,dst) computes dst = src1*src2 + src3

            measurement_angle = measurement->data.fl[0];
            measurement_pt = calc_point(measurement_angle);
            
            // feed the latest measurement to the Kalman filter; afterwards, generate process noise
            // and advance the state one step by multiplying with the transition matrix F and adding that noise
            /* plot points */
            #define draw_cross( center, color, d )                                 \
                cvLine( img, cvPoint( center.x - d, center.y - d ),                \
                             cvPoint( center.x + d, center.y + d ), color, 1, CV_AA, 0); \
                cvLine( img, cvPoint( center.x + d, center.y - d ),                \
                             cvPoint( center.x - d, center.y + d ), color, 1, CV_AA, 0 )

            cvZero( img );
            // use the macro defined above
            draw_cross( state_pt, CV_RGB(255,255,255), 3 );// white: true state point
            draw_cross( measurement_pt, CV_RGB(255,0,0), 3 );// red: measurement point
            draw_cross( predict_pt, CV_RGB(0,255,0), 3 );// green: predicted point
            cvLine( img, state_pt, measurement_pt, CV_RGB(255,0,0), 3, CV_AA, 0 );
            cvLine( img, state_pt, predict_pt, CV_RGB(255,255,0), 3, CV_AA, 0 );
            
            cvKalmanCorrect( kalman, measurement );// correct the filter with the new measurement

            cvRandArr( &rng, process_noise, CV_RAND_NORMAL, cvRealScalar(0),
                       cvRealScalar(sqrt(kalman->process_noise_cov->data.fl[0])));// normally distributed process noise
            cvMatMulAdd( kalman->transition_matrix, state, process_noise, state );

            cvShowImage( "Kalman", img );
            // when a key is pressed, start a new outer iteration; the initial matrices may change, so the motion rate changes
            code = (char) cvWaitKey( 100 );
            if( code > 0 )
                break;
        }
        if( code == 27 || code == 'q' || code == 'Q' )
            break;
    }
    
    cvDestroyWindow("Kalman");

    return 0;
}
Beispiel #20
0
int main(int argc, char **argv)
{

	float priors[] = { 1.0, 10.0 };	// Edible vs. poisonous weights

	CvMat *var_type;
	CvMat *data;				// jmh add
	data = cvCreateMat(20, 30, CV_8U);	// jmh add

	var_type = cvCreateMat(data->cols + 1, 1, CV_8U);
	cvSet(var_type, cvScalarAll(CV_VAR_CATEGORICAL));	// all these vars 
	// are categorical
	CvDTree *dtree;
	dtree = new CvDTree;
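	// (responses and missing are assumed to be CvMat* objects prepared elsewhere
	//  in the full sample; only the training call is shown in this excerpt)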
	dtree->train(data, CV_ROW_SAMPLE, responses, 0, 0, var_type, missing,
				 CvDTreeParams(8,		// max depth
							   10,		// min sample count
							   0,		// regression accuracy: N/A here
							   true,	// compute surrogate split,
										//   as we have missing data
							   15,		// max number of categories
										//   (use sub-optimal algorithm for
										//   larger numbers)
							   10,		// cross-validations
							   true,	// use 1SE rule => smaller tree
							   true,	// throw away the pruned tree branches
							   priors	// the array of priors; the bigger
										//   p_weight, the more attention
										//   to the poisonous mushrooms
							   ));

	dtree->save("tree.xml", "MyTree");
	dtree->clear();
	dtree->load("tree.xml", "MyTree");

#define MAX_CLUSTERS 5
	CvScalar color_tab[MAX_CLUSTERS];
	IplImage *img = cvCreateImage(cvSize(500, 500), 8, 3);
	CvRNG rng = cvRNG(0xffffffff);

	color_tab[0] = CV_RGB(255, 0, 0);
	color_tab[1] = CV_RGB(0, 255, 0);
	color_tab[2] = CV_RGB(100, 100, 255);
	color_tab[3] = CV_RGB(255, 0, 255);
	color_tab[4] = CV_RGB(255, 255, 0);

	cvNamedWindow("clusters", 1);

	for (;;) {
		int k, cluster_count = cvRandInt(&rng) % MAX_CLUSTERS + 1;
		int i, sample_count = cvRandInt(&rng) % 1000 + 1;
		CvMat *points = cvCreateMat(sample_count, 1, CV_32FC2);
		CvMat *clusters = cvCreateMat(sample_count, 1, CV_32SC1);

		/* generate random sample from multivariate 
		   Gaussian distribution */
		for (k = 0; k < cluster_count; k++) {
			CvPoint center;
			CvMat point_chunk;
			center.x = cvRandInt(&rng) % img->width;
			center.y = cvRandInt(&rng) % img->height;
			cvGetRows(points, &point_chunk,
					  k * sample_count / cluster_count,
					  k == cluster_count - 1 ? sample_count :
					  (k + 1) * sample_count / cluster_count);
			cvRandArr(&rng, &point_chunk, CV_RAND_NORMAL,
					  cvScalar(center.x, center.y, 0, 0),
					  cvScalar(img->width / 6, img->height / 6, 0, 0));
		}

		/* shuffle samples */
		for (i = 0; i < sample_count / 2; i++) {
			CvPoint2D32f *pt1 = (CvPoint2D32f *) points->data.fl +
				cvRandInt(&rng) % sample_count;
			CvPoint2D32f *pt2 = (CvPoint2D32f *) points->data.fl +
				cvRandInt(&rng) % sample_count;
			CvPoint2D32f temp;
			CV_SWAP(*pt1, *pt2, temp);
		}

		cvKMeans2(points, cluster_count, clusters,
				  cvTermCriteria(CV_TERMCRIT_EPS + CV_TERMCRIT_ITER, 10, 1.0));
		cvZero(img);
		for (i = 0; i < sample_count; i++) {
			CvPoint2D32f pt = ((CvPoint2D32f *) points->data.fl)[i];
			int cluster_idx = clusters->data.i[i];
			cvCircle(img, cvPointFrom32f(pt), 2,
					 color_tab[cluster_idx], CV_FILLED);
		}

		cvReleaseMat(&points);
		cvReleaseMat(&clusters);

		cvShowImage("clusters", img);

		int key = cvWaitKey(0);
		if (key == 27)			// 'ESC'
			break;
	}
}
Beispiel #21
0
CV_IMPL void cvRandShuffle( CvArr* arr, CvRNG* rng, double iter_factor )
{
    CV_FUNCNAME( "cvRandShuffle" );

    __BEGIN__;

    const int sizeof_int = (int)sizeof(int);
    CvMat stub, *mat = (CvMat*)arr;
    int i, j, k, iters, delta = 0;
    int cont_flag, arr_size, elem_size, cols, step;
    const int pair_buf_sz = 100;
    int* pair_buf = (int*)cvStackAlloc( pair_buf_sz*sizeof(pair_buf[0])*2 );
    CvMat _pair_buf = cvMat( 1, pair_buf_sz*2, CV_32S, pair_buf );
    CvRNG _rng = cvRNG(-1);
    uchar* data = 0;
    int* idata = 0;
    
    if( !CV_IS_MAT(mat) )
        CV_CALL( mat = cvGetMat( mat, &stub ));

    if( !rng )
        rng = &_rng;

    cols = mat->cols;
    step = mat->step;
    arr_size = cols*mat->rows;
    iters = cvRound(iter_factor*arr_size)*2;
    cont_flag = CV_IS_MAT_CONT(mat->type);
    elem_size = CV_ELEM_SIZE(mat->type);
    if( elem_size % sizeof_int == 0 && (cont_flag || step % sizeof_int == 0) )
    {
        idata = mat->data.i;
        step /= sizeof_int;
        elem_size /= sizeof_int;
    }
    else
        data = mat->data.ptr;

    for( i = 0; i < iters; i += delta )
    {
        delta = MIN( iters - i, pair_buf_sz*2 );
        _pair_buf.cols = delta;
        cvRandArr( rng, &_pair_buf, CV_RAND_UNI, cvRealScalar(0), cvRealScalar(arr_size) );
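        // pair_buf now holds `delta` random indices in [0, arr_size); the
        // consecutive pairs (pair_buf[j], pair_buf[j+1]) are swapped below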
        
        if( cont_flag )
        {
            if( idata )
                for( j = 0; j < delta; j += 2 )
                {
                    int* p = idata + pair_buf[j]*elem_size, *q = idata + pair_buf[j+1]*elem_size, t;
                    for( k = 0; k < elem_size; k++ )
                        CV_SWAP( p[k], q[k], t );
                }
            else
                for( j = 0; j < delta; j += 2 )
                {
                    uchar* p = data + pair_buf[j]*elem_size, *q = data + pair_buf[j+1]*elem_size, t;
                    for( k = 0; k < elem_size; k++ )
                        CV_SWAP( p[k], q[k], t );
                }
        }
        else
        {
            if( idata )
                for( j = 0; j < delta; j += 2 )
                {
                    int idx1 = pair_buf[j], idx2 = pair_buf[j+1], row1, row2;
                    int* p, *q, t;
                    row1 = idx1/step; row2 = idx2/step;
                    p = idata + row1*step + (idx1 - row1*cols)*elem_size;
                    q = idata + row2*step + (idx2 - row2*cols)*elem_size;
                    
                    for( k = 0; k < elem_size; k++ )
                        CV_SWAP( p[k], q[k], t );
                }
            else
                for( j = 0; j < delta; j += 2 )
                {
                    int idx1 = pair_buf[j], idx2 = pair_buf[j+1], row1, row2;
                    uchar* p, *q, t;
                    row1 = idx1/step; row2 = idx2/step;
                    p = data + row1*step + (idx1 - row1*cols)*elem_size;
                    q = data + row2*step + (idx2 - row2*cols)*elem_size;
                    
                    for( k = 0; k < elem_size; k++ )
                        CV_SWAP( p[k], q[k], t );
                }
        }
    }

    __END__;
}
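/* A minimal usage sketch of the cvRandShuffle API implemented above; the vector
   size and the seed value are illustrative assumptions. */
#include <opencv/cxcore.h>
#include <stdio.h>

static void rand_shuffle_demo( void )
{
    int values[10];
    CvMat vec = cvMat( 1, 10, CV_32SC1, values );
    CvRNG rng = cvRNG( 0x12345678 );

    for( int i = 0; i < 10; i++ )
        values[i] = i;                    // 0, 1, ..., 9

    cvRandShuffle( &vec, &rng, 1. );      // iter_factor = 1 (the default)

    for( int i = 0; i < 10; i++ )
        printf( "%d ", values[i] );
    printf( "\n" );
}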
Beispiel #22
0
CV_IMPL void cvFindStereoCorrespondenceGC(const CvArr* _left, const CvArr* _right,
        CvArr* _dispLeft, CvArr* _dispRight, CvStereoGCState* state, int useDisparityGuess) {
    CvStereoGCState2 state2;
    state2.orphans = 0;
    state2.maxOrphans = 0;

    CvMat lstub, *left = cvGetMat(_left, &lstub);
    CvMat rstub, *right = cvGetMat(_right, &rstub);
    CvMat dlstub, *dispLeft = cvGetMat(_dispLeft, &dlstub);
    CvMat drstub, *dispRight = cvGetMat(_dispRight, &drstub);
    CvSize size;
    int iter, i, nZeroExpansions = 0;
    CvRNG rng = cvRNG(-1);
    int* disp;
    CvMat _disp;
    int64 E;

    CV_Assert(state != 0);
    CV_Assert(CV_ARE_SIZES_EQ(left, right) && CV_ARE_TYPES_EQ(left, right) &&
              CV_MAT_TYPE(left->type) == CV_8UC1);
    CV_Assert(!dispLeft ||
              (CV_ARE_SIZES_EQ(dispLeft, left) && CV_MAT_CN(dispLeft->type) == 1));
    CV_Assert(!dispRight ||
              (CV_ARE_SIZES_EQ(dispRight, left) && CV_MAT_CN(dispRight->type) == 1));

    size = cvGetSize(left);
    if (!state->left || state->left->width != size.width || state->left->height != size.height) {
        int pcn = (int)(sizeof(GCVtx*) / sizeof(int));
        int vcn = (int)(sizeof(GCVtx) / sizeof(int));
        int ecn = (int)(sizeof(GCEdge) / sizeof(int));
        cvReleaseMat(&state->left);
        cvReleaseMat(&state->right);
        cvReleaseMat(&state->ptrLeft);
        cvReleaseMat(&state->ptrRight);
        cvReleaseMat(&state->dispLeft);
        cvReleaseMat(&state->dispRight);

        state->left = cvCreateMat(size.height, size.width, CV_8UC3);
        state->right = cvCreateMat(size.height, size.width, CV_8UC3);
        state->dispLeft = cvCreateMat(size.height, size.width, CV_16SC1);
        state->dispRight = cvCreateMat(size.height, size.width, CV_16SC1);
        state->ptrLeft = cvCreateMat(size.height, size.width, CV_32SC(pcn));
        state->ptrRight = cvCreateMat(size.height, size.width, CV_32SC(pcn));
        state->vtxBuf = cvCreateMat(1, size.height * size.width * 2, CV_32SC(vcn));
        state->edgeBuf = cvCreateMat(1, size.height * size.width * 12 + 16, CV_32SC(ecn));
    }

    if (!useDisparityGuess) {
        cvSet(state->dispLeft, cvScalarAll(OCCLUDED));
        cvSet(state->dispRight, cvScalarAll(OCCLUDED));
    } else {
        CV_Assert(dispLeft && dispRight);
        cvConvert(dispLeft, state->dispLeft);
        cvConvert(dispRight, state->dispRight);
    }

    state2.Ithreshold = state->Ithreshold;
    state2.interactionRadius = state->interactionRadius;
    state2.lambda = cvRound(state->lambda * DENOMINATOR);
    state2.lambda1 = cvRound(state->lambda1 * DENOMINATOR);
    state2.lambda2 = cvRound(state->lambda2 * DENOMINATOR);
    state2.K = cvRound(state->K * DENOMINATOR);

    icvInitStereoConstTabs();
    icvInitGraySubpix(left, right, state->left, state->right);
    disp = (int*)cvStackAlloc(state->numberOfDisparities * sizeof(disp[0]));
    _disp = cvMat(1, state->numberOfDisparities, CV_32S, disp);
    cvRange(&_disp, state->minDisparity, state->minDisparity + state->numberOfDisparities);
    cvRandShuffle(&_disp, &rng);
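    // the disparity labels will be visited in this random order during the
    // alpha-expansion sweeps below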

    if (state2.lambda < 0 && (state2.K < 0 || state2.lambda1 < 0 || state2.lambda2 < 0)) {
        float L = icvComputeK(state) * 0.2f;
        state2.lambda = cvRound(L * DENOMINATOR);
    }

    if (state2.K < 0) {
        state2.K = state2.lambda * 5;
    }
    if (state2.lambda1 < 0) {
        state2.lambda1 = state2.lambda * 3;
    }
    if (state2.lambda2 < 0) {
        state2.lambda2 = state2.lambda;
    }

    icvInitStereoTabs(&state2);

    E = icvComputeEnergy(state, &state2, !useDisparityGuess);
    for (iter = 0; iter < state->maxIters; iter++) {
        for (i = 0; i < state->numberOfDisparities; i++) {
            int alpha = disp[i];
            int64 Enew = icvAlphaExpand(E, -alpha, state, &state2);
            if (Enew < E) {
                nZeroExpansions = 0;
                E = Enew;
            } else if (++nZeroExpansions >= state->numberOfDisparities) {
                break;
            }
        }
    }

    if (dispLeft) {
        cvConvert(state->dispLeft, dispLeft);
    }
    if (dispRight) {
        cvConvert(state->dispRight, dispRight);
    }

    cvFree(&state2.orphans);
}
Beispiel #23
0
int main( int argc, char** argv )
{
    char* filename = argc >= 2 ? argv[1] : (char*)"fruits.jpg";
    CvRNG rng = cvRNG(-1);

    if( (img0 = cvLoadImage(filename,1)) == 0 )
        return 0;

    printf( "Hot keys: \n"
            "\tESC - quit the program\n"
            "\tr - restore the original image\n"
            "\tw or SPACE - run watershed algorithm\n"
            "\t\t(before running it, roughly mark the areas on the image)\n" );
    
    cvNamedWindow( "image", 1 );
    cvNamedWindow( "watershed transform", 1 );

    img = cvCloneImage( img0 );
    img_gray = cvCloneImage( img0 );
    wshed = cvCloneImage( img0 );
    marker_mask = cvCreateImage( cvGetSize(img), 8, 1 );
    markers = cvCreateImage( cvGetSize(img), IPL_DEPTH_32S, 1 );
    cvCvtColor( img, marker_mask, CV_BGR2GRAY );
    cvCvtColor( marker_mask, img_gray, CV_GRAY2BGR );

    cvZero( marker_mask );
    cvZero( wshed );
    cvShowImage( "image", img );
    cvShowImage( "watershed transform", wshed );
    cvSetMouseCallback( "image", on_mouse, 0 );

    for(;;)
    {
        int c = cvWaitKey(0);

        if( (char)c == 27 )
            break;

        if( (char)c == 'r' )
        {
            cvZero( marker_mask );
            cvCopy( img0, img );
            cvShowImage( "image", img );
        }

        if( (char)c == 'w' || (char)c == ' ' )
        {
            CvMemStorage* storage = cvCreateMemStorage(0);
            CvSeq* contours = 0;
            CvMat* color_tab;
            int i, j, comp_count = 0;
            //cvSaveImage( "wshed_mask.png", marker_mask );
            //marker_mask = cvLoadImage( "wshed_mask.png", 0 );
            cvFindContours( marker_mask, storage, &contours, sizeof(CvContour),
                            CV_RETR_CCOMP, CV_CHAIN_APPROX_SIMPLE );
            cvZero( markers );
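            // each contour (connected component) is rasterized into markers
            // with its own positive label, comp_count+1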
            for( ; contours != 0; contours = contours->h_next, comp_count++ )
            {
                cvDrawContours( markers, contours, cvScalarAll(comp_count+1),
                                cvScalarAll(comp_count+1), -1, -1, 8, cvPoint(0,0) );
            }

            color_tab = cvCreateMat( 1, comp_count, CV_8UC3 );
            for( i = 0; i < comp_count; i++ )
            {
                uchar* ptr = color_tab->data.ptr + i*3;
                ptr[0] = (uchar)(cvRandInt(&rng)%180 + 50);
                ptr[1] = (uchar)(cvRandInt(&rng)%180 + 50);
                ptr[2] = (uchar)(cvRandInt(&rng)%180 + 50);
            }

            {
            double t = (double)cvGetTickCount();
            cvWatershed( img0, markers );
            t = (double)cvGetTickCount() - t;
            printf( "exec time = %gms\n", t/(cvGetTickFrequency()*1000.) );
            }

            // paint the watershed image
            for( i = 0; i < markers->height; i++ )
                for( j = 0; j < markers->width; j++ )
                {
                    int idx = CV_IMAGE_ELEM( markers, int, i, j );
                    uchar* dst = &CV_IMAGE_ELEM( wshed, uchar, i, j*3 );
                    if( idx == -1 )
                        dst[0] = dst[1] = dst[2] = (uchar)255;
                    else if( idx <= 0 || idx > comp_count )
                        dst[0] = dst[1] = dst[2] = (uchar)0; // should not get here
                    else
                    {
                        uchar* ptr = color_tab->data.ptr + (idx-1)*3;
                        dst[0] = ptr[0]; dst[1] = ptr[1]; dst[2] = ptr[2];
                    }
                }

            cvAddWeighted( wshed, 0.5, img_gray, 0.5, 0, wshed );
            cvShowImage( "watershed transform", wshed );
            cvReleaseMemStorage( &storage );
            cvReleaseMat( &color_tab );
        }
    }

    return 0;
}
Beispiel #24
0
void CV_SolvePolyTest::run( int start_from ) {
    CvRNG rng = cvRNG();

    int fig = 100;
    double range = 50;

    for (int idx = 0, max_idx = 1000, progress = 0;
            idx < max_idx; ++idx) {
        int n = cvRandInt(&rng) % 13 + 1;

        std::vector<complex_type> r(n), ar(n), c(n + 1, 0);
        std::vector<double> a(n + 1), u(n * 2);

        int rr_odds = 3; // odds that we get a real root
        for (int j = 0; j < n;) {
            if (cvRandInt(&rng) % rr_odds == 0 || j == n - 1)
                r[j++] = cvRandReal(&rng) * range;
            else {
                r[j] = complex_type(cvRandReal(&rng) * range,
                                    cvRandReal(&rng) * range + 1);
                r[j + 1] = std::conj(r[j]);
                j += 2;
            }
        }

        for (int j = 0, k = 1 << n, jj, kk; j < k; ++j) {
            int p = 0;
            complex_type v(1);
            for (jj = 0, kk = 1; jj < n && !(j & kk);
                    ++jj, ++p, kk <<= 1);
            for (; jj < n; ++jj, kk <<= 1)
                if (j & kk)
                    v *= -r[jj];
                else
                    ++p;
            c[p] += v;
        }

        bool pass = false;
        double div;
        for (int maxiter = 10;
                !pass && maxiter < 10000;
                maxiter *= 2) {
            for (int j = 0; j < n + 1; ++j)
                a[j] = c[j].real();

            CvMat amat, umat;
            cvInitMatHeader(&amat, n + 1, 1, CV_64FC1, &a[0]);
            cvInitMatHeader(&umat, n, 1, CV_64FC2, &u[0]);
            cvSolvePoly(&amat, &umat, maxiter, fig);

            for (int j = 0; j < n; ++j)
                ar[j] = complex_type(u[j * 2], u[j * 2 + 1]);

            sort(r.begin(), r.end(), pred_complex());
            sort(ar.begin(), ar.end(), pred_complex());

            div = 0;
            double s = 0;
            for (int j = 0; j < n; ++j) {
                s += r[j].real() + fabs(r[j].imag());
                div += pow(r[j].real() - ar[j].real(), 2) +
                       pow(r[j].imag() - ar[j].imag(), 2);
            }
            div /= s;
            pass = div < 1e-2;
        }

        if (!pass) {
            ts->set_failed_test_info(CvTS::FAIL_INVALID_OUTPUT);

            std::cerr<<std::endl;

            for (unsigned int j=0; j<r.size(); ++j)
                std::cout << "r[" << j << "] = " << r[j] << std::endl;

            std::cout << std::endl;
            for (unsigned int j=0; j<ar.size(); ++j)
                std::cout << "ar[" << j << "] = " << ar[j] << std::endl;
        }

        progress = update_progress(progress, idx-1, max_idx, 0);
    }
}
Beispiel #25
0
/**
 * Main function
 * Performs random navigation with obstacle avoidance and map generation, and
 * outputs pose and velocity related information.
 */
int main(int argc, char** argv)
{
  // Open file to store robot poses
  outfile.open("Postures.txt");

  // Create a window to show the map
  cv::namedWindow("Mapa", 0);

  //
  // Create map related variables
  //
  map.create(ceil(MAP_HEIGHT/MAP_RESOLUTION),
             ceil(MAP_WIDTH/MAP_RESOLUTION),
             CV_8UC1);
  // Set the initial map cell values to "undefined"
  map.setTo(127);

  //
  // Create robot related objects
  //
  // Linear and angular velocities for the robot (initially stopped)
  double lin_vel=0, ang_vel=0;
  double last_ang_vel = DEG2RAD(15);
  // Navigation variables
  bool avoid, new_rotation = false;
  double stop_front_dist, min_front_dist;
  // Random number generator (used for direction choice)
  CvRNG rnggstate = cvRNG(0xffffffff);

  // Init ROS
  ros::init(argc, argv, "tp4");

  // ROS variables/objects
  ros::NodeHandle nh; // Node handle
  ros::Publisher vel_pub; // Velocity commands publisher
  geometry_msgs::Twist vel_cmd; // Velocity commands

  std::cout << "Random navigation with obstacle avoidance and map generation\n"
            << "---------------------------" << std::endl;

  // Get parameters
  ros::NodeHandle n_private("~");
  n_private.param("min_front_dist", min_front_dist, 1.0);
  n_private.param("stop_front_dist", stop_front_dist, 0.6);

  /// Setup subscribers
  // Odometry
  ros::Subscriber sub_odom = nh.subscribe("/robot_0/odom", 1, odomCallback);
  // Laser scans
  ros::Subscriber sub_laser = nh.subscribe("/robot_0/base_scan", 1, laserCallback);
  // Point clouds
  ros::Subscriber sub_pcl = nh.subscribe("cloud_filtered", 1, pointCloudCallback);

  /// Setup publisher
  vel_pub = nh.advertise<geometry_msgs::Twist>("/robot_0/cmd_vel", 1);

  // Infinite loop
  ros::Rate cycle(10.0); // Rate when no key is being pressed
  while(ros::ok())
  {
    // Get data from the robot and print it if available
    ros::spinOnce();

    // Only change navigation controls if laser was updated
    if( laser_updated == false )
      continue;

    // show pose estimated from odometry
    std::cout << std::setiosflags(std::ios::fixed) << std::setprecision(3)
              << "Robot estimated pose = "
              << robot_pose.x << " [m], " << robot_pose.y << " [m], "
              << RAD2DEG(robot_pose.theta) << " [º]\n";

    // Show estimated velocity
    std::cout << "Robot estimated velocity = "
              << true_lin_vel << " [m/s], "
              << RAD2DEG(true_ang_vel) << " [º/s]\n";

    // Check for obstacles near the front  of the robot
    avoid = false;
    if( closest_front_obstacle < min_front_dist )
    {
      if( closest_front_obstacle < stop_front_dist )
      {
        avoid = true;
        lin_vel = -0.100;
      } else
      {
        avoid = true;
        lin_vel = 0;
      }
    } else
    {
      lin_vel = 0.8;
      ang_vel = 0;
      new_rotation = false;
    }

    // Rotate to avoid obstacles
    if(avoid)
    {
      if( new_rotation == false )
      {
        float rnd_point = cvRandReal(&rnggstate );
        if( rnd_point >= 0.9 )
        {
          last_ang_vel = -last_ang_vel;
        }
      }
      ang_vel = last_ang_vel;
      new_rotation = true;
    }

    // Limit maximum velocities
    // (not needed here)
//    lin_vel = clipValue(lin_vel, -MAX_LIN_VEL, MAX_LIN_VEL);
//    ang_vel = clipValue(ang_vel, -MAX_ANG_VEL, MAX_ANG_VEL);

    // Show desired velocity
    std::cout << "Robot desired velocity = "
              << lin_vel << " [m/s], "
              << RAD2DEG(ang_vel) << " [º/s]" << std::endl;

    // Send velocity commands
    vel_cmd.angular.z = ang_vel;
    vel_cmd.linear.x = lin_vel;
    vel_pub.publish(vel_cmd);

    // Terminate loop if Escape key is pressed
    if( cv::waitKey(10) == 27 )
      break;

    // Proceed at desired framerate
    cycle.sleep();
  }

  // If we are quitting, stop the robot
  vel_cmd.angular.z = 0;
  vel_cmd.linear.x = 0;
  vel_pub.publish(vel_cmd);

  // Store map
  cv::imwrite("mapa.png", map);

  // Close file
  outfile.close();

  return 1;
}
Beispiel #26
0
void CvEM::kmeans( const CvVectors& train_data, int nclusters, CvMat* labels,
                   CvTermCriteria termcrit, const CvMat* centers0 )
{
    CvMat* centers = 0;
    CvMat* old_centers = 0;
    CvMat* counters = 0;

    CV_FUNCNAME( "CvEM::kmeans" );

    __BEGIN__;

    CvRNG rng = cvRNG(-1);
    int i, j, k, nsamples, dims;
    int iter = 0;
    double max_dist = DBL_MAX;

    termcrit = cvCheckTermCriteria( termcrit, 1e-6, 100 );
    termcrit.epsilon *= termcrit.epsilon;
    nsamples = train_data.count;
    dims = train_data.dims;
    nclusters = MIN( nclusters, nsamples );

    CV_CALL( centers = cvCreateMat( nclusters, dims, CV_64FC1 ));
    CV_CALL( old_centers = cvCreateMat( nclusters, dims, CV_64FC1 ));
    CV_CALL( counters = cvCreateMat( 1, nclusters, CV_32SC1 ));
    cvZero( old_centers );

    if( centers0 )
    {
        CV_CALL( cvConvert( centers0, centers ));
    }
    else
    {
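        // balanced initial assignment (i*nclusters/nsamples) followed by a random
        // permutation, so every cluster starts out non-empty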
        for( i = 0; i < nsamples; i++ )
            labels->data.i[i] = i*nclusters/nsamples;
        cvRandShuffle( labels, &rng );
    }

    for( ;; )
    {
        CvMat* temp;

        if( iter > 0 || centers0 )
        {
            for( i = 0; i < nsamples; i++ )
            {
                const float* s = train_data.data.fl[i];
                int k_best = 0;
                double min_dist = DBL_MAX;

                for( k = 0; k < nclusters; k++ )
                {
                    const double* c = (double*)(centers->data.ptr + k*centers->step);
                    double dist = 0;

                    for( j = 0; j <= dims - 4; j += 4 )
                    {
                        double t0 = c[j] - s[j];
                        double t1 = c[j+1] - s[j+1];
                        dist += t0*t0 + t1*t1;
                        t0 = c[j+2] - s[j+2];
                        t1 = c[j+3] - s[j+3];
                        dist += t0*t0 + t1*t1;
                    }

                    for( ; j < dims; j++ )
                    {
                        double t = c[j] - s[j];
                        dist += t*t;
                    }

                    if( min_dist > dist )
                    {
                        min_dist = dist;
                        k_best = k;
                    }
                }

                labels->data.i[i] = k_best;
            }
        }

        if( ++iter > termcrit.max_iter )
            break;

        CV_SWAP( centers, old_centers, temp );
        cvZero( centers );
        cvZero( counters );

        // update centers
        for( i = 0; i < nsamples; i++ )
        {
            const float* s = train_data.data.fl[i];
            k = labels->data.i[i];
            double* c = (double*)(centers->data.ptr + k*centers->step);

            for( j = 0; j <= dims - 4; j += 4 )
            {
                double t0 = c[j] + s[j];
                double t1 = c[j+1] + s[j+1];

                c[j] = t0;
                c[j+1] = t1;

                t0 = c[j+2] + s[j+2];
                t1 = c[j+3] + s[j+3];

                c[j+2] = t0;
                c[j+3] = t1;
            }
            for( ; j < dims; j++ )
                c[j] += s[j];
            counters->data.i[k]++;
        }

        if( iter > 1 )
            max_dist = 0;

        for( k = 0; k < nclusters; k++ )
        {
            double* c = (double*)(centers->data.ptr + k*centers->step);
            if( counters->data.i[k] != 0 )
            {
                double scale = 1./counters->data.i[k];
                for( j = 0; j < dims; j++ )
                    c[j] *= scale;
            }
            else
            {
                const float* s;
                for( j = 0; j < 10; j++ )
                {
                    i = cvRandInt( &rng ) % nsamples;
                    if( counters->data.i[labels->data.i[i]] > 1 )
                        break;
                }
                s = train_data.data.fl[i];
                for( j = 0; j < dims; j++ )
                    c[j] = s[j];
            }

            if( iter > 1 )
            {
                double dist = 0;
                const double* c_o = (double*)(old_centers->data.ptr + k*old_centers->step);
                for( j = 0; j < dims; j++ )
                {
                    double t = c[j] - c_o[j];
                    dist += t*t;
                }
                if( max_dist < dist )
                    max_dist = dist;
            }
        }

        if( max_dist < termcrit.epsilon )
            break;
    }

    cvZero( counters );
    for( i = 0; i < nsamples; i++ )
        counters->data.i[labels->data.i[i]]++;

    // ensure that we do not have empty clusters
    for( k = 0; k < nclusters; k++ )
        if( counters->data.i[k] == 0 )
            for(;;)
            {
                i = cvRandInt(&rng) % nsamples;
                j = labels->data.i[i];
                if( counters->data.i[j] > 1 )
                {
                    labels->data.i[i] = k;
                    counters->data.i[j]--;
                    counters->data.i[k]++;
                    break;
                }
            }

    __END__;

    cvReleaseMat( &centers );
    cvReleaseMat( &old_centers );
    cvReleaseMat( &counters );
}
Beispiel #27
0
// This function creates a cross-validation EstimateModel.
ML_IMPL CvStatModel*
cvCreateCrossValidationEstimateModel(
    int                samples_all,
    const CvStatModelParams* estimateParams,
    const CvMat*             sampleIdx)
{
    CvStatModel*            model   = NULL;
    CvCrossValidationModel* crVal   = NULL;

    CV_FUNCNAME ("cvCreateCrossValidationEstimateModel");
    __BEGIN__

    int  k_fold = 10;

    int  i, j, k, s_len;
    int  samples_selected;
    CvRNG rng;
    CvRNG* prng;
    int* res_s_data;
    int* te_s_data;
    int* folds;

    rng = cvRNG(cvGetTickCount());
    cvRandInt (&rng);
    cvRandInt (&rng);
    cvRandInt (&rng);
    cvRandInt (&rng);
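    // (a few initial draws are discarded; a common way to decorrelate time-based seeds)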
// Check input parameters.
    if (estimateParams)
        k_fold = ((CvCrossValidationParams*)estimateParams)->k_fold;
    if (!k_fold)
    {
        CV_ERROR (CV_StsBadArg, "Error in parameters of cross-validation (k_fold == 0)!");
    }
    if (samples_all <= 0)
    {
        CV_ERROR (CV_StsBadArg, "<samples_all> should be positive!");
    }

// Alloc memory and fill standard StatModel fields.
    CV_CALL (crVal = (CvCrossValidationModel*)cvCreateStatModel (
                         CV_STAT_MODEL_MAGIC_VAL | CV_CROSSVAL_MAGIC_VAL,
                         sizeof(CvCrossValidationModel),
                         cvReleaseCrossValidationModel,
                         NULL, NULL));
    crVal->current_fold    = -1;
    crVal->folds_all       = k_fold;
    if (estimateParams && ((CvCrossValidationParams*)estimateParams)->is_regression)
        crVal->is_regression = 1;
    else
        crVal->is_regression = 0;
    if (estimateParams && ((CvCrossValidationParams*)estimateParams)->rng)
        prng = ((CvCrossValidationParams*)estimateParams)->rng;
    else
        prng = &rng;

    // Check and preprocess sample indices.
    if (sampleIdx)
    {
        int s_step;
        int s_type = 0;

        if (!CV_IS_MAT (sampleIdx))
            CV_ERROR (CV_StsBadArg, "Invalid sampleIdx array");

        if (sampleIdx->rows != 1 && sampleIdx->cols != 1)
            CV_ERROR (CV_StsBadSize, "sampleIdx array must be 1-dimensional");

        s_len = sampleIdx->rows + sampleIdx->cols - 1;
        s_step = sampleIdx->rows == 1 ?
                 1 : sampleIdx->step / CV_ELEM_SIZE(sampleIdx->type);

        s_type = CV_MAT_TYPE (sampleIdx->type);

        switch (s_type)
        {
        case CV_8UC1:
        case CV_8SC1:
        {
            uchar* s_data = sampleIdx->data.ptr;

            // sampleIdx is array of 1's and 0's -
            // i.e. it is a mask of the selected samples
            if( s_len != samples_all )
                CV_ERROR (CV_StsUnmatchedSizes,
                          "Sample mask should contain as many elements as the total number of samples");

            samples_selected = 0;
            for (i = 0; i < s_len; i++)
                samples_selected += s_data[i * s_step] != 0;

            if (samples_selected == 0)
                CV_ERROR (CV_StsOutOfRange, "No samples are selected!");
        }
        s_len = samples_selected;
        break;
        case CV_32SC1:
            if (s_len > samples_all)
                CV_ERROR (CV_StsOutOfRange,
                          "sampleIdx array may not contain more elements than the total number of samples");
            samples_selected = s_len;
            break;
        default:
            CV_ERROR (CV_StsUnsupportedFormat, "Unsupported sampleIdx array data type "
                      "(it should be 8uC1, 8sC1 or 32sC1)");
        }

        // Alloc additional memory for internal Idx and fill it.
        /*!!*/  CV_CALL (res_s_data = crVal->sampleIdxAll =
                                          (int*)cvAlloc (2 * s_len * sizeof(int)));

        if (s_type < CV_32SC1)
        {
            uchar* s_data = sampleIdx->data.ptr;
            for (i = 0; i < s_len; i++)
                if (s_data[i * s_step])
                {
                    *res_s_data++ = i;
                }
            res_s_data = crVal->sampleIdxAll;
        }
        else
        {
            int* s_data = sampleIdx->data.i;
            int out_of_order = 0;

            for (i = 0; i < s_len; i++)
            {
                res_s_data[i] = s_data[i * s_step];
                if (i > 0 && res_s_data[i] < res_s_data[i - 1])
                    out_of_order = 1;
            }

            if (out_of_order)
                qsort (res_s_data, s_len, sizeof(res_s_data[0]), icvCmpIntegers);

            if (res_s_data[0] < 0 ||
                    res_s_data[s_len - 1] >= samples_all)
                CV_ERROR (CV_StsBadArg, "There are out-of-range sample indices");
            for (i = 1; i < s_len; i++)
                if (res_s_data[i] <= res_s_data[i - 1])
                    CV_ERROR (CV_StsBadArg, "There are duplicated sample indices");
        }
    }
    else // if (sampleIdx)
    {
        // Alloc additional memory for internal Idx and fill it.
        s_len = samples_all;
        CV_CALL (res_s_data = crVal->sampleIdxAll = (int*)cvAlloc (2 * s_len * sizeof(int)));
        for (i = 0; i < s_len; i++)
        {
            *res_s_data++ = i;
        }
        res_s_data = crVal->sampleIdxAll;
    } // if (sampleIdx) ... else

// Resort internal Idx.
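// (a Fisher-Yates shuffle over the s_len selected sample indices)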
    te_s_data = res_s_data + s_len;
    for (i = s_len; i > 1; i--)
    {
        j = cvRandInt (prng) % i;
        k = *(--te_s_data);
        *te_s_data = res_s_data[j];
        res_s_data[j] = k;
    }

// Duplicate resorted internal Idx.
// It will be used to simplify operation of getting trainIdx.
    te_s_data = res_s_data + s_len;
    for (i = 0; i < s_len; i++)
    {
        *te_s_data++ = *res_s_data++;
    }

// Cut sampleIdxAll to parts.
    if (k_fold > 0)
    {
        if (k_fold > s_len)
        {
            CV_ERROR (CV_StsBadArg,
                      "Error in parameters of cross-validation ('k_fold' > #samples)!");
        }
        folds = crVal->folds = (int*) cvAlloc ((k_fold + 1) * sizeof (int));
        *folds++ = 0;
        for (i = 1; i < k_fold; i++)
        {
            *folds++ = cvRound (i * s_len * 1. / k_fold);
        }
        *folds = s_len;
        folds = crVal->folds;

        crVal->max_fold_size = (s_len - 1) / k_fold + 1;
    }
    else
    {
        k = -k_fold;
        crVal->max_fold_size = k;
        if (k >= s_len)
        {
            CV_ERROR (CV_StsBadArg,
                      "Error in parameters of cross-validation (-'k_fold' > #samples)!");
        }
        crVal->folds_all = k = (s_len - 1) / k + 1;

        folds = crVal->folds = (int*) cvAlloc ((k + 1) * sizeof (int));
        for (i = 0; i < k; i++)
        {
            *folds++ = -i * k_fold;
        }
        *folds = s_len;
        folds = crVal->folds;
    }

// Prepare other internal fields to working.
    CV_CALL (crVal->predict_results = cvCreateMat (1, samples_all, CV_32FC1));
    CV_CALL (crVal->sampleIdxEval = cvCreateMatHeader (1, 1, CV_32SC1));
    CV_CALL (crVal->sampleIdxTrain = cvCreateMatHeader (1, 1, CV_32SC1));
    crVal->sampleIdxEval->cols = 0;
    crVal->sampleIdxTrain->cols = 0;
    crVal->samples_all = s_len;
    crVal->is_checked = 1;

    crVal->getTrainIdxMat = cvCrossValGetTrainIdxMatrix;
    crVal->getCheckIdxMat = cvCrossValGetCheckIdxMatrix;
    crVal->nextStep = cvCrossValNextStep;
    crVal->check = cvCrossValCheckClassifier;
    crVal->getResult = cvCrossValGetResult;
    crVal->reset = cvCrossValReset;

    model = (CvStatModel*)crVal;

    __END__

    if (!model)
    {
        cvReleaseCrossValidationModel ((CvStatModel**)&crVal);
    }

    return model;
} // End of cvCreateCrossValidationEstimateModel
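/* A hedged sketch of how this factory might be invoked. It assumes that
   CvCrossValidationParams is a plain struct whose k_fold / is_regression / rng
   members are exactly the ones read above, that zero-initializing the remaining
   members is acceptable, and that the legacy ml declarations are visible. */
#include <string.h>

static CvStatModel* make_5fold_estimator( int samples_all )
{
    CvCrossValidationParams params;
    memset( &params, 0, sizeof(params) );
    params.k_fold = 5;              // positive k_fold: split the data into 5 folds
    params.is_regression = 0;       // classification-style evaluation
    params.rng = 0;                 // let the function seed its own CvRNG

    // no sampleIdx: all samples_all samples take part in the cross-validation
    return cvCreateCrossValidationEstimateModel( samples_all,
                                                 (CvStatModelParams*)&params, 0 );
}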