Example #1
void bContourFinder::findConvexHulls(){
  CvMemStorage *stor = cvCreateMemStorage(); 
  CvSeq * ptseq = cvCreateSeq( CV_SEQ_KIND_CURVE|CV_32SC2,
                                    sizeof(CvContour),
                                    sizeof(CvPoint),
                                    stor );
  CvSeq * hull; 
  CvPoint pt; 
  this->convexBlobs.clear();
  for(int i = 0; i < (int)this->blobs.size(); i++){
    this->convexBlobs.push_back(ofxCvBlob());
    this->convexBlobs[i] = this->blobs[i];
    this->convexBlobs[i].pts.clear(); 
    // start from an empty sequence for each blob; otherwise points
    // from previous blobs accumulate in ptseq
    cvClearSeq( ptseq );
    // fill in the points of blob i
    for(int j = 0; j < (int)this->blobs[i].pts.size(); j++){
      pt.x = this->blobs[i].pts[j].x; 
      pt.y = this->blobs[i].pts[j].y; 
      cvSeqPush( ptseq, &pt ); 
    }

    // returnPoints = 0, so the hull stores pointers back into ptseq
    hull = cvConvexHull2( ptseq, 0, CV_CLOCKWISE, 0 ); 

    // copy the hull points into the blob (double dereference:
    // each hull element is a CvPoint* into ptseq)
    for( int j = 0; j < hull->total; j++ ) {
      pt = **CV_GET_SEQ_ELEM( CvPoint*, hull, j );
      convexBlobs[i].pts.push_back( ofPoint((float)pt.x, (float)pt.y) );
    }
    convexBlobs[i].nPts = convexBlobs[i].pts.size();
  }
  // release the storage itself; cvClearMemStorage would only recycle
  // its blocks and leak the storage header
  cvReleaseMemStorage( &stor ); 
}
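A minimal usage sketch, assuming bContourFinder extends ofxCvContourFinder (so blobs is filled by a prior findContours call) and exposes convexBlobs publicly; the app-side names (grayImg, contourFinder) are hypothetical:

// inside a hypothetical ofApp::update(), after thresholding grayImg
contourFinder.findContours(grayImg, 20, (640*480)/3, 10, false);
contourFinder.findConvexHulls(); // fills convexBlobs from blobs
for(int i = 0; i < (int)contourFinder.convexBlobs.size(); i++){
    contourFinder.convexBlobs[i].draw(0, 0); // hulls are ordinary ofxCvBlobs
}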
void sceneManager::passInPacket(computerVisionPacket * packet){

	// raw copy of the incoming packet into the current scene; this
	// assumes computerVisionPacket is a plain-old-data struct, so a
	// shallow memcpy is safe
	memcpy(&(scenes[currentScene]->packet), packet, sizeof(computerVisionPacket));

	// this is where we update the blob tracker
	networkBlobs.clear();
	networkBlobs.assign(packet->nBlobs, ofxCvBlob());

	for(int i=0; i<packet->nBlobs; i++) {

		networkBlobs[i].hole		 = packet->bAmInner[i];
		networkBlobs[i].boundingRect = packet->rect[i];
		networkBlobs[i].centroid = packet->centroid[i];
		networkBlobs[i].nPts = packet->nPts[i];
		networkBlobs[i].pts.assign(packet->nPts[i], ofPoint());

		for (int j = 0; j < packet->nPts[i]; j++) {

			float x = packet->pts[i][j].x;
			float y = packet->pts[i][j].y;
			networkBlobs[i].pts[j].set(x, y);

		}
	}

	// go and track them
	tracker.trackBlobs(networkBlobs);
}
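The flat memcpy above only works if computerVisionPacket is a fixed-size POD struct. A sketch of a layout consistent with the field accesses in passInPacket; the array bounds (MAX_BLOBS, MAX_PTS) and exact types are assumptions, not from the source:

// hypothetical layout, inferred from the accesses above;
// requires the openFrameworks types (ofRectangle, ofPoint)
#define MAX_BLOBS 64
#define MAX_PTS   512
struct computerVisionPacket {
    int         nBlobs;
    bool        bAmInner[MAX_BLOBS];      // hole flags
    ofRectangle rect[MAX_BLOBS];          // bounding rects
    ofPoint     centroid[MAX_BLOBS];
    int         nPts[MAX_BLOBS];          // points per blob
    ofPoint     pts[MAX_BLOBS][MAX_PTS];  // contour points
};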
Example #3
void bContourFinder::smoothApproxChains(){ //int smoothMod, float value){
  CvMemStorage *stor = cvCreateMemStorage(); 
  CvSeq * ptseq = cvCreateSeq( CV_SEQ_KIND_CURVE | CV_SEQ_ELTYPE_POINT,
                                    sizeof(CvContour),
                                    sizeof(CvPoint),
                                    stor );
  CvSeq * approx; 
  CvPoint pt; 
  this->convexBlobs.clear();
  for(int i = 0; i < (int)this->blobs.size(); i++){
    this->convexBlobs.push_back(ofxCvBlob());
    this->convexBlobs[i] = this->blobs[i];
    this->convexBlobs[i].pts.clear(); 
    // start from an empty sequence for each blob; otherwise points
    // from previous blobs accumulate in ptseq
    cvClearSeq( ptseq );
    // fill in the points of blob i
    for(int j = 0; j < (int)this->blobs[i].pts.size(); j++){
      pt.x = this->blobs[i].pts[j].x; 
      pt.y = this->blobs[i].pts[j].y; 
      cvSeqPush( ptseq, &pt ); 
    }

    // Douglas-Peucker approximation; the epsilon is a fraction of the
    // contour perimeter, so smoothing scales with blob size
    approx = cvApproxPoly( ptseq, sizeof(CvContour), stor,
        CV_POLY_APPROX_DP, cvContourPerimeter(ptseq) * 0.004, 0 );

    // cvApproxPoly returns a sequence of points (not pointers),
    // so a single dereference is correct here
    for( int j = 0; j < approx->total; j++ ) {
      pt = *CV_GET_SEQ_ELEM( CvPoint, approx, j );
      convexBlobs[i].pts.push_back( ofPoint((float)pt.x, (float)pt.y) );
    }
    convexBlobs[i].nPts = convexBlobs[i].pts.size();
  }
  // release the storage itself rather than just clearing it
  cvReleaseMemStorage( &stor ); 
}
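The hard-coded 0.004 is the smoothing knob: the Douglas-Peucker epsilon is that fraction of the contour perimeter, so larger values collapse more vertices. A sketch of exposing it as a parameter, in the spirit of the commented-out signature above; approxWithFraction is a hypothetical helper, not part of the source:

// hypothetical helper: approximate a point sequence with a
// caller-supplied perimeter fraction instead of the fixed 0.004
CvSeq * approxWithFraction( CvSeq * ptseq, CvMemStorage * stor, double fraction ){
    double eps = cvContourPerimeter( ptseq ) * fraction; // epsilon in pixels
    return cvApproxPoly( ptseq, sizeof(CvContour), stor,
                         CV_POLY_APPROX_DP, eps, 0 );
}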
//--------------------------------------------------------------------------------
int ofxCvContourFinder::findContours( ofxCvGrayscaleImage&  input,
									  int minArea,
									  int maxArea,
									  int nConsidered,
									  bool bFindHoles,
                                      bool bUseApproximation) {

    // get width/height disregarding ROI
    IplImage* ipltemp = input.getCvImage();
    _width = ipltemp->width;
    _height = ipltemp->height;

	reset();

	// opencv will clobber the image it detects contours on, so we copy
    // the input into inputCopy before detecting. That copy is allocated
    // if necessary (necessary = (a) not allocated or (b) wrong size),
	// so be careful if you pass different sized images to findContours:
	// there is a performance penalty from reallocating, though not a
    // memory leak. If you are finding contours in both a 640x480 image
    // and a 320x240 image, it is better to make two ofxCvContourFinder
    // objects than to reuse one, so you avoid that penalty.

	if( inputCopy.getWidth() == 0 ) {
		inputCopy.setUseTexture(false);
		inputCopy.allocate( _width, _height );
	} else if( inputCopy.getWidth() != _width || inputCopy.getHeight() != _height ) {
        // reallocate to new size
        inputCopy.clear();
		inputCopy.setUseTexture(false);		
        inputCopy.allocate( _width, _height );
	}

    inputCopy.setROI( input.getROI() );
    inputCopy = input;

	CvSeq* contour_list = NULL;
	contour_storage = cvCreateMemStorage( 1000 );
	storage	= cvCreateMemStorage( 1000 );

	CvContourRetrievalMode  retrieve_mode
        = (bFindHoles) ? CV_RETR_LIST : CV_RETR_EXTERNAL;
	cvFindContours( inputCopy.getCvImage(), contour_storage, &contour_list,
                    sizeof(CvContour), retrieve_mode, bUseApproximation ? CV_CHAIN_APPROX_SIMPLE : CV_CHAIN_APPROX_NONE );
	CvSeq* contour_ptr = contour_list;

	// put the contours from the linked list, into an array for sorting
	while( (contour_ptr != NULL) ) {
		float area = fabs( cvContourArea(contour_ptr, CV_WHOLE_SEQ) );
		if( (area > minArea) && (area < maxArea) ) {
            cvSeqBlobs.push_back(contour_ptr);
		}
		contour_ptr = contour_ptr->h_next;
	}


	// sort the pointers based on size
	if( cvSeqBlobs.size() > 1 ) {
        sort( cvSeqBlobs.begin(), cvSeqBlobs.end(), sort_carea_compare );
	}


	// now, we have cvSeqBlobs.size() contours, sorted by size in the array
    // cvSeqBlobs let's get the data out and into our structures that we like
	for( int i = 0; i < MIN(nConsidered, (int)cvSeqBlobs.size()); i++ ) {
		blobs.push_back( ofxCvBlob() );
		float area = cvContourArea( cvSeqBlobs[i], CV_WHOLE_SEQ );
		CvRect rect	= cvBoundingRect( cvSeqBlobs[i], 0 );
		cvMoments( cvSeqBlobs[i], myMoments );

		blobs[i].area                     = fabs(area);
		blobs[i].hole                     = (area < 0);
		blobs[i].length 			      = cvArcLength(cvSeqBlobs[i]);
		blobs[i].boundingRect.x           = rect.x;
		blobs[i].boundingRect.y           = rect.y;
		blobs[i].boundingRect.width       = rect.width;
		blobs[i].boundingRect.height      = rect.height;
		blobs[i].centroid.x 			  = (myMoments->m10 / myMoments->m00);
		blobs[i].centroid.y 			  = (myMoments->m01 / myMoments->m00);

		// get the points for the blob:
		CvPoint           pt;
		CvSeqReader       reader;
		cvStartReadSeq( cvSeqBlobs[i], &reader, 0 );

    	for( int j=0; j < cvSeqBlobs[i]->total; j++ ) {
			CV_READ_SEQ_ELEM( pt, reader );
            blobs[i].pts.push_back( ofPoint((float)pt.x, (float)pt.y) );
		}
		blobs[i].nPts = blobs[i].pts.size();

	}

    nBlobs = blobs.size();

	// Free the storage memory.
	// Warning: release it inside this function, otherwise a strange memory leak appears.
	if( contour_storage != NULL ) { cvReleaseMemStorage(&contour_storage); }
	if( storage != NULL ) { cvReleaseMemStorage(&storage); }

	return nBlobs;

}
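Typical use follows the stock openFrameworks opencv example: subtract the background, threshold, then scan for blobs. The image names below are illustrative:

// in a hypothetical ofApp::update(), with 640x480 grayscale images
grayDiff.absDiff(grayBg, grayImage);  // background subtraction
grayDiff.threshold(80);               // binarize
// up to 10 blobs between 20 px and a third of the frame,
// no holes, simplified (approximated) contours
contourFinder.findContours(grayDiff, 20, (640*480)/3, 10, false, true);
for(int i = 0; i < contourFinder.nBlobs; i++){
    contourFinder.blobs[i].draw(0, 0);
}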
Example #5
ofxCvBlob JCHaarFinder::makeBlob(int x, int y, int w, int h) {
    ofxCvBlob blob;
    blob.boundingRect = ofRectangle(x, y, w, h);

    return blob;
}
//--------------------------------------------------------------------------------
void blobTracker::track(vector<ofVec3f>& points, int aliveFrames, float speedThreshold){

    ///List of candidate users (touches) detected in the new frame
    vector<touch> newTouchList;

    ///Step 1. Give every new blob (a candidate user) id = -1 and copy the blob data
    for(unsigned int i = 0; i < points.size(); i++){

        touch t;
        t.id = -1;
		t.blob = ofxCvBlob();
		t.blob.centroid = points[i];
		t.framesToLive = aliveFrames;
        newTouchList.push_back(t);
		newTouchList.back().filter.setup(2,15,1,1);
		newTouchList.back().filter.setState(points[i], 0.1);
    }
    ///Step 2. Track the blobs from the previous frame: find the nearest match among the new ones
    for(unsigned int i = 0; i < touchList.size(); i++){

        int resultIndex = trackKnn(&newTouchList, &(touchList[i].blob), 3);

        if(resultIndex == -1){
			///protection against flicker (a blob briefly disappearing)
			touchList[i].framesToLive--;
			if(touchList[i].framesToLive <= 0)
				touchList[i].id = -1;///mark the user for removal
		}
        else{///the user was found in the list

            ///If the new blob already had another possible match, compare the two
            if(newTouchList[resultIndex].id != -1){

                unsigned int j;
                for(j = 0; j < touchList.size(); j++){
                    if(touchList[j].id == newTouchList[resultIndex].id)
                        break;
                }
                if(j == touchList.size()){
                    newTouchList[resultIndex].id = touchList[i].id;
                    touchList[i] = newTouchList[resultIndex];
                }
                ///Compare with the competing blob
                else{

                    double x = newTouchList[resultIndex].blob.centroid.x;
                    double y = newTouchList[resultIndex].blob.centroid.y;
                    double xOld = touchList[j].blob.centroid.x;
                    double yOld = touchList[j].blob.centroid.y;
                    double xNew = touchList[i].blob.centroid.x;
                    double yNew = touchList[i].blob.centroid.y;
                    double distOld = (x-xOld)*(x-xOld)+(y-yOld)*(y-yOld);
                    double distNew = (x-xNew)*(x-xNew)+(y-yNew)*(y-yNew);

                    if(distNew < distOld){
                        newTouchList[resultIndex].id = touchList[i].id;
                        touchList[j].id = -1;
                    }
                    else touchList[i].id = -1;
                }
            }
            ///Tracking went through without conflicts
            else newTouchList[resultIndex].id = touchList[i].id;

        }
    }
    ///Step 3. Update the positions of users from the previous frame; remove those that were not detected
    for(unsigned int i = 0; i < touchList.size(); i++)
	{
		if(touchList[i].id == -1){///remove the user
			ofNotifyEvent(blobDeleted, touchList[i].blob);
			touchList.erase(touchList.begin() + i);
			i--;
		}
		else{

			for(unsigned int j = 0; j < newTouchList.size(); j++)
				if(touchList[i].id == newTouchList[j].id){
					///update the data
					ofVec3f lastCentroid = touchList[i].blob.centroid;///centroid from the previous frame
					touchList[i].blob = newTouchList[j].blob;
					touchList[i].framesToLive = aliveFrames;

					ofVec3f positionDifference;
					positionDifference.set(touchList[i].blob.centroid.x - lastCentroid.x, touchList[i].blob.centroid.y - lastCentroid.y);

					///Code fragment from CCV
					//float posDelta = sqrtf((positionDifference.x*positionDifference.x)+(positionDifference.y*positionDifference.y));

					//int MOVEMENT_FILTERING = 0;
					//float a = 1.0f - 1.0f / expf(posDelta / (1.0f + (float)MOVEMENT_FILTERING*10));
					//users[i].blob.centroid.x = a * users[i].blob.centroid.x + (1-a) * lastCentroid.x;
					//users[i].blob.centroid.y = a * users[i].blob.centroid.y + (1-a) * lastCentroid.y;
					///End of the CCV fragment

					/// Kalman filter
					if(positionDifference.length() > speedThreshold){
						touchList[i].blob.centroid = touchList[i].filter.getCorrect(touchList[i].blob.centroid);
						ofNotifyEvent(blobMoved, touchList[i].blob);
					}
					else touchList[i].blob.centroid = lastCentroid;

					/*touchList[i].blob.D.set(touchList[i].blob.centroid.x - lastCentroid.x, 
                                          touchList[i].blob.centroid.y - lastCentroid.y);*/
				}
		}
	}
    ///Step 4. Add new candidate users
    for(unsigned int i = 0; i < newTouchList.size(); i++){
		if(newTouchList[i].id == -1){

            newTouchList[i].id = idCounter;
            idCounter++;
            touchList.push_back(newTouchList[i]);
			touchList.back().blob.id = touchList.back().id;
			ofNotifyEvent(blobAdded, touchList.back().blob);
		}
	}
}
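A sketch of wiring the tracker into an app. The listener signatures follow the ofNotifyEvent(..., ofxCvBlob) calls above (so blobAdded, blobMoved, and blobDeleted are assumed to be ofEvent<ofxCvBlob> members); everything on the app side, including getCentroids, is hypothetical:

class ofApp : public ofBaseApp {
public:
    blobTracker tracker;

    void setup(){
        ofAddListener(tracker.blobAdded,   this, &ofApp::onBlobAdded);
        ofAddListener(tracker.blobMoved,   this, &ofApp::onBlobMoved);
        ofAddListener(tracker.blobDeleted, this, &ofApp::onBlobDeleted);
    }
    void update(){
        vector<ofVec3f> centroids = getCentroids(); // hypothetical source of points
        // keep lost blobs alive for 5 frames; ignore jitter under 2 px
        tracker.track(centroids, 5, 2.0f);
    }
    vector<ofVec3f> getCentroids(); // hypothetical: this frame's blob centroids
    void onBlobAdded(ofxCvBlob & blob){ /* ... */ }
    void onBlobMoved(ofxCvBlob & blob){ /* ... */ }
    void onBlobDeleted(ofxCvBlob & blob){ /* ... */ }
};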