/**
 * - FUNCTION: CBlobGetCompactness
 * - FUNCTIONALITY: Calculates the compactness of the blob
 *   (maximum for circle-shaped blobs, minimum for the rest):
 *   P^2 / (4*pi*A).
 * - RESULT: compactness, or 0.0 for a zero-area blob (avoids division by zero).
 * - AUTHOR: Ricard Borras
 * - CREATION DATE: 25-05-2005.
 */
double CBlobGetCompactness::operator()(CBlob &blob)
{
	if( blob.Area() != 0.0 )
		// P*P instead of pow(P,2): same value, no transcendental call
		return (blob.Perimeter() * blob.Perimeter()) / (4 * CV_PI * blob.Area());
	else
		return 0.0;
}
/**
 * - FUNCTION: CBlobGetElongation
 * - FUNCTIONALITY: Calculates the elongation of the blob (length/breadth).
 *   The blob is approximated by a rectangle with the same area and
 *   perimeter: solving P = 2(w+l), A = w*l gives
 *   w = (P + sqrt(P^2 - 16A)) / 4.
 * - RESULT: length/breadth ratio, or 0 for degenerate blobs.
 * - RESTRICTIONS: length and breadth are approximations.
 * - AUTHOR: Ricard Borras
 * - CREATION DATE: 25-05-2005.
 */
double CBlobGetElongation::operator()(const CBlob &blob) const
{
	double perimeter = blob.Perimeter();
	double discriminant = perimeter * perimeter - 16 * blob.Area();
	double ampladaC;

	// BUG FIX: guard against a negative discriminant — sqrt of a negative
	// value yields NaN. Fall back to P/4, exactly as CBlobGetBreadth and
	// CBlobGetLength already do for this case.
	if( discriminant > 0.0 )
		ampladaC = (perimeter + sqrt(discriminant)) / 4;
	else
		ampladaC = perimeter / 4;

	if( ampladaC <= 0.0 ) return 0;

	double longitudC = blob.Area() / ampladaC;
	double longitud = MAX( longitudC , ampladaC );
	double amplada = MIN( longitudC , ampladaC );

	// ROBUSTNESS: a zero-area blob gives amplada == 0; return 0 instead of
	// dividing by zero (the original produced inf/NaN here).
	if( amplada <= 0.0 ) return 0;

	return longitud / amplada;
}
/**
 * - FUNCTION: CBlobGetBreadth
 * - FUNCTIONALITY: Calculates the breadth of the blob (its smallest axis),
 *   approximating the blob by a rectangle of equal area and perimeter.
 * - RESULT: approximate breadth, or 0 for degenerate blobs.
 * - RESTRICTIONS: the breadth is an approximation of the real breadth.
 * - AUTHOR: Ricard Borras
 * - CREATION DATE: 25-05-2005.
 */
double CBlobGetBreadth::operator()(CBlob &blob)
{
	const double perimeter = blob.Perimeter();
	// discriminant of P = 2(w+l), A = w*l solved for the major axis
	const double discriminant = perimeter * perimeter - 16 * blob.Area();

	double majorAxis;
	if( discriminant > 0.0 )
		majorAxis = (perimeter + sqrt(discriminant)) / 4;
	else
		// intrinsic error in the area/perimeter estimates; degrade gracefully
		majorAxis = perimeter / 4;

	if( majorAxis <= 0.0 )
		return 0;

	const double minorAxis = blob.Area() / majorAxis;
	return MIN( minorAxis , majorAxis );
}
/**
 * - FUNCTION: CBlobGetLength
 * - FUNCTIONALITY: Calculates the length of the blob (its biggest axis),
 *   approximating the blob by a rectangle of equal area and perimeter.
 * - RESULT: approximate length, or 0 for degenerate blobs.
 * - RESTRICTIONS: the length is an approximation of the real length.
 * - AUTHOR: Ricard Borras
 * - CREATION DATE: 25-05-2005.
 */
double CBlobGetLength::operator()(const CBlob &blob) const
{
	const double perimeter = blob.Perimeter();
	// discriminant of P = 2(w+l), A = w*l solved for the major axis
	const double discriminant = perimeter * perimeter - 16 * blob.Area();

	double majorAxis;
	if( discriminant > 0.0 )
		majorAxis = (perimeter + sqrt(discriminant)) / 4;
	else
		// intrinsic error in the area/perimeter estimates; degrade gracefully
		majorAxis = perimeter / 4;

	if( majorAxis <= 0.0 )
		return 0;

	const double minorAxis = blob.Area() / majorAxis;
	return MAX( minorAxis , majorAxis );
}
/**
 * - FUNCTION: CBlob (copy constructor)
 * - FUNCTIONALITY: Deep-copies every property of src, including a full copy
 *   of its edge-point sequence into freshly allocated memory storage, so the
 *   new blob owns its own CvMemStorage independently of src.
 * - AUTHOR: Ricard Borras
 * - CREATION DATE: 25-05-2005.
 *
 * NOTE(review): this class allocates m_storage here; if it also frees it in
 * a destructor, an assignment operator should exist too (rule of three) —
 * confirm elsewhere in the file.
 */
CBlob::CBlob( const CBlob &src )
{
	// copy the scalar properties of the source blob into this one
	etiqueta = src.etiqueta;
	exterior = src.exterior;
	area = src.Area();
	perimeter = src.Perimeter();
	parent = src.parent;
	minx = src.minx;
	maxx = src.maxx;
	miny = src.miny;
	maxy = src.maxy;
	sumx = src.sumx;
	sumy = src.sumy;
	sumxx = src.sumxx;
	sumyy = src.sumyy;
	sumxy = src.sumxy;
	mean = src.mean;
	stddev = src.stddev;
	externPerimeter = src.externPerimeter;

	// deep-copy the edge points of the source blob into this one
	CvSeqReader reader;
	CvSeqWriter writer;
	CvPoint edgeactual;

	// create an empty sequence for the edges, backed by our own storage
	m_storage = cvCreateMemStorage(0);
	edges = cvCreateSeq( CV_SEQ_KIND_GENERIC|CV_32SC2, sizeof(CvContour), sizeof(CvPoint),m_storage);

	cvStartReadSeq( src.Edges(), &reader);
	cvStartAppendToSeq( edges, &writer );

	// element-by-element copy of the edge sequence
	for( int i=0; i< src.Edges()->total; i++)
	{
		CV_READ_SEQ_ELEM( edgeactual ,reader);
		CV_WRITE_SEQ_ELEM( edgeactual , writer );
	}

	cvEndWriteSeq( &writer );
}
/**
 * Segments touch blobs from the input depth-difference image, converts each
 * sufficiently large blob's center to projector coordinates, and emits the
 * resulting TouchEvent list. Triggers recalibration when an implausible
 * number of simultaneous touches is seen, and prints an FPS figure every
 * 60 processed frames.
 *
 * @param image binary (thresholded) input image for blob extraction.
 */
void PSTouch::blobDetect(cv::Mat& image){
	CBlobResult res(image,cv::Mat(),NUMCORES);
	std::vector<TouchEvent> events;
	qRegisterMetaType<std::vector<TouchEvent > >("std::vector<TouchEvent>");
	// BUG FIX: GetNumBlobs() returns a signed int; the loop index was
	// `unsigned int`, causing a signed/unsigned comparison.
	for (int i = 0; i < res.GetNumBlobs(); i++){
		CBlob blob = res.GetBlob(i);
		if(blob.Area()<3) {
			continue; // ignore tiny blobs — sensor noise
		}
		cv::Point point = blob.getCenter();
		// depth looked up at the blob center; at<>() takes (row, col) = (y, x)
		cv::Point3f camPoint(point.x,point.y,groundTruth->at<openni::DepthPixel>(point.y,point.x));
		cv::Point2i p = transform->transformPointfromCamToProjector(camPoint);
		// keep only touches that land inside the projector area
		if(p.x < 1200 && p.y < 700 && p.x>0 && p.y >0){
			TouchEvent event(p,camPoint);
			events.push_back(event);
		}
	}
	// more than 10 simultaneous touches indicates a bad calibration
	if(events.size() >10){
		qDebug("RECALIBRATE");
		calibrateTouch();
	}
	emit updateEvents(events);
	//Timing: report FPS once every 60 frames
	timerCount++;
	//qDebug()<<"Timer: "<< timerCount;
	if(timerCount==60){
		timerCount=0;
		// restart() returns the elapsed ms AND resets the timer, so no
		// second restart() call is needed (the original had a redundant one).
		int x = timer.restart();
		// BUG FIX: 60 frames are counted per window, so divide 60 (was 50.0)
		// by the elapsed seconds for a correct fps figure.
		float fps = 60.0/((float)x/1000.0);
		qDebug() << " working with: " << fps << "fps " << x;
	}
}
/**
 * - FUNCTION: Moment
 * - FUNCTIONALITY: Calculates the pq moment of the blob.
 * - RESULT: the pq moment, or 0 if that moment is not implemented.
 * - RESTRICTIONS: only the 00, 10, 01, 20 and 02 moments are implemented.
 * - AUTHOR: Ricard Borras
 * - CREATION DATE: 20-07-2004.
 */
double CBlobGetMoment::operator()(const CBlob &blob) const
{
	// Dispatch on (m_p, m_q); any unimplemented combination falls through to 0.
	if( m_q == 0 )
	{
		if( m_p == 0 ) return blob.Area();   // moment 00
		if( m_p == 1 ) return blob.SumX();   // moment 10
		if( m_p == 2 ) return blob.SumXX();  // moment 20
	}
	else if( m_p == 0 )
	{
		if( m_q == 1 ) return blob.SumY();   // moment 01
		if( m_q == 2 ) return blob.SumYY();  // moment 02
	}
	return 0;
}
/**
 * Extracts the letter region from the previously computed shape mask
 * (_shape): erodes the mask, removes all white blobs except the biggest one,
 * clusters the corresponding _image pixels into two groups with k-means, and
 * writes a binary image into _letter where the minority cluster is white.
 *
 * NOTE(review): `T` is presumably a template parameter of the enclosing
 * class (pixel channel type) — confirm against the class declaration.
 */
void Auvsi_Recognize::extractLetter( void )
{
	typedef cv::Vec<unsigned char, 1> VT_binary;
#ifdef TWO_CHANNEL
	typedef cv::Vec<T, 2> VT;
#else
	typedef cv::Vec<T, 3> VT;
#endif
	typedef cv::Vec<int, 1> IT;

	// Erode input slightly to detach thin connections
	cv::Mat input;
	cv::erode( _shape, input, cv::Mat() );

	// Remove any small white blobs left over: keep only the biggest blob,
	// painting every other blob black in the binary image
	CBlobResult blobs;
	CBlob * currentBlob;
	CBlob biggestBlob;
	IplImage binaryIpl = input;

	blobs = CBlobResult( &binaryIpl, NULL, 0 );
	blobs.GetNthBlob( CBlobGetArea(), 0, biggestBlob );
	blobs.Filter( blobs, B_EXCLUDE, CBlobGetArea(), B_GREATER_OR_EQUAL, biggestBlob.Area() );
	for (int i = 0; i < blobs.GetNumBlobs(); i++ )
	{
		currentBlob = blobs.GetBlob(i);
		currentBlob->FillBlob( &binaryIpl, cvScalar(0));
	}

	// Perform k-means on this region only
	int areaLetter = (int)biggestBlob.Area();
	// NOTE(review): kMeansInput is allocated before the areaLetter<=0 guard
	// below — a non-positive row count reaches the cv::Mat constructor.
	cv::Mat kMeansInput = cv::Mat( areaLetter, 1, _image.type() );

	// Discard if we couldn't extract a letter
	if( areaLetter <= 0 )
	{
		_letter = cv::Mat( _shape );
		_letter = cv::Scalar(0);
		return;
	}

	// Gather the _image pixels covered by the blob into kMeansInput (one
	// pixel per row, in scan order)
	cv::MatIterator_<VT_binary> binaryIterator = input.begin<VT_binary>();
	cv::MatIterator_<VT_binary> binaryEnd = input.end<VT_binary>();
	cv::MatIterator_<VT> kMeansIterator = kMeansInput.begin<VT>();

	for( ; binaryIterator != binaryEnd; ++binaryIterator )
	{
		if( (*binaryIterator)[0] > 0 )
		{
			(*kMeansIterator) = _image.at<VT>( binaryIterator.pos() );
			++kMeansIterator;
		}
	}

	// Get k-means labels (two clusters: letter vs. background inside the shape)
	cv::Mat labels = doClustering<T>( kMeansInput, 2, false );
	int numZeros = areaLetter - cv::countNonZero( labels );
	// the minority cluster is assumed to be the letter
	bool useZeros = numZeros < cv::countNonZero( labels );

	// Reshape into original form
	_letter = cv::Mat( _shape.size(), _shape.type() );
	_letter = cv::Scalar(0);

	binaryIterator = input.begin<VT_binary>();
	binaryEnd = input.end<VT_binary>();
	// labelsIterator advances only on white mask pixels, staying in sync
	// with the scan-order gathering loop above
	cv::MatIterator_<IT> labelsIterator = labels.begin<IT>();

	for( int index = 0; binaryIterator != binaryEnd; ++binaryIterator )
	{
		if( (*binaryIterator)[0] > 0 )
		{
			// Whichever label was the minority, we make that value white and all other values black
			unsigned char value = (*labelsIterator)[0];

			// dangling-else note: each `else` binds to the nearest `if`,
			// so this reads as: useZeros ? (value?0:255) : (value?255:0)
			if( useZeros )
				if( value )
					value = 0;
				else
					value = 255;
			else
				if( value )
					value = 255;
				else
					value = 0;

			_letter.at<VT_binary>( binaryIterator.pos() ) = VT_binary( value );
			++labelsIterator;
		}
	}
}
/**
 * Segments the dominant shape out of _image: clusters the image into two
 * colors, binarizes so the background is black, then uses blob filtering to
 * fill small holes (black islands) and remove small white specks, leaving a
 * clean mask of the single biggest white region in _shape.
 */
void Auvsi_Recognize::extractShape( void )
{
	typedef cv::Vec<T, 1> VT;

	// Reduce input to two colors
	cv::Mat reducedColors = doClustering<T>( _image, 2 );
	cv::Mat grayScaled, binary;

	// Make output grayscale
	grayScaled = convertToGray( reducedColors );
	//cv::cvtColor( reducedColors, grayScaled, CV_RGB2GRAY );

	// Make binary: threshold at the darker of the two cluster levels
	double min, max;
	cv::minMaxLoc( grayScaled, &min, &max );
	cv::threshold( grayScaled, binary, min, 1.0, cv::THRESH_BINARY );

	// ensure that background is black, image white — the corner pixel (0,0)
	// is assumed to be background
	if( binary.at<VT>(0, 0)[0] > 0.0f )
		cv::threshold( grayScaled, binary, min, 1.0, cv::THRESH_BINARY_INV );

	binary.convertTo( binary, CV_8U, 255.0f );

	// Fill in all black regions smaller than largest black region with white
	// (blob extraction with background color 255 finds the black blobs)
	CBlobResult blobs;
	CBlob * currentBlob;
	IplImage binaryIpl = binary;
	blobs = CBlobResult( &binaryIpl, NULL, 255 );

	// Get area of biggest blob
	CBlob biggestBlob;
	blobs.GetNthBlob( CBlobGetArea(), 0, biggestBlob );

	// Remove all blobs of smaller area
	blobs.Filter( blobs, B_EXCLUDE, CBlobGetArea(), B_GREATER_OR_EQUAL, biggestBlob.Area() );
	for (int i = 0; i < blobs.GetNumBlobs(); i++ )
	{
		currentBlob = blobs.GetBlob(i);
		currentBlob->FillBlob( &binaryIpl, cvScalar(255));
	}

	// Fill in all small white regions black (background color 0 now finds
	// the white blobs); the biggest white blob is kept via FillBlob below
	blobs = CBlobResult( &binaryIpl, NULL, 0 );
	blobs.GetNthBlob( CBlobGetArea(), 0, biggestBlob );
	blobs.Filter( blobs, B_EXCLUDE, CBlobGetArea(), B_GREATER_OR_EQUAL, biggestBlob.Area() );
	for (int i = 0; i < blobs.GetNumBlobs(); i++ )
	{
		currentBlob = blobs.GetBlob(i);
		currentBlob->FillBlob( &binaryIpl, cvScalar(0));
	}

	// Start from an all-black mask and paint only the biggest white blob
	binary = cv::Scalar(0);
	biggestBlob.FillBlob( &binaryIpl, cvScalar(255));

	_shape = binary;
}
/**
 * Hand/robot tracking demo: detects two hands (skin color) and a robot
 * (bright/white blob) per frame, tracks both hands with Kalman filters,
 * decides which hand is holding the robot, logs results to logs.csv and
 * summary statistics to stats.txt. ESC (or end of video) exits.
 */
int main(){
	// drawing colors for the tracked blobs (fill color + accent color each)
	Scalar robotColor = CV_RGB(255, 0, 0);
	Scalar rightColor = CV_RGB(0, 255, 0);
	Scalar leftColor = CV_RGB(0, 0, 255);
	Scalar robotColor_2 = CV_RGB(0, 255, 255);
	Scalar rightColor_2 = CV_RGB(255, 0, 255);
	Scalar leftColor_2 = CV_RGB(255, 255, 0);

	// tunable detection parameters (exposed as trackbars below)
	int lowH = 0;
	int highH = 14;
	int top_cut = 120;
	int bot_cut = 70;
	int lowV = 200;
	int type = 0;
	int ticks = 0;
	int nb_errors = 0;
	int len = 150;
	int trace = 25;
	int sensitivity = 100;
	int area = 3000;
	int flip = 0; //set to 0 if no flips are needed, 1 for y axis, 2 for x axis and 3 for both

	namedWindow("My Window", 1);
	createTrackbar("lowH", "My Window", &lowH, 180);
	createTrackbar("highH", "My Window", &highH, 180);
	createTrackbar("top_cut", "My Window", &top_cut, 255);
	createTrackbar("bot_cut", "My Window", &bot_cut, 255);
	createTrackbar("lowV", "My Window", &lowV, 255);
	createTrackbar("LEN", "My Window", &len, 300);
	createTrackbar("TRACE", "My Window", &trace, 100);
	createTrackbar("SENSITIVITY", "My Window", &sensitivity, 200);
	createTrackbar("AREA", "My Window", &area, 7000);
	createTrackbar("FLIP", "My Window", &flip, 3);
	moveWindow("My Window", 0, 0);
	namedWindow("kalman", 1);
	moveWindow("kalman", 500, 0);
	namedWindow("Blobs Image", 1);
	moveWindow("Blobs Image", 500, 300);
	namedWindow("frame", 1);
	moveWindow("frame", 0, 500);
	namedWindow("test", WINDOW_AUTOSIZE);
	moveWindow("test", 0, 500);
	namedWindow("white", WINDOW_AUTOSIZE);
	moveWindow("white", 0, 500);

	//file of video input (unused while capturing from the camera below)
	string filename = "testVideo_5.webm";

	ofstream logs;
	ofstream stats;
	stats.open("stats.txt");
	logs.open("logs.csv");
	logs << "Left_x,Left_y,Left_holds,Right_x,Right_y,Right_holds,confirmed" << endl;

	// NOTE(review): center_window is never used below
	Point center_window = Point(WIDTH/2, (HEIGHT - top_cut - bot_cut)/2);
	Point center_left = Point(WIDTH/4, .5*max(10, HEIGHT - top_cut - bot_cut));
	Point center_right = Point(3*WIDTH/4, .5*max(10, HEIGHT - top_cut - bot_cut));

	// initialize the kalman filters (4 state vars: x, y, vx, vy; 2 measured)
	KalmanFilter KF_left(4, 2, 0);
	KalmanFilter KF_right(4, 2, 0);
	Mat_<float> measurement_left(2,1);
	measurement_left.setTo(Scalar(0));
	Mat_<float> measurement_right(2,1);
	measurement_right.setTo(Scalar(0));
	initialize_kalman(&KF_left, center_left);
	initialize_kalman(&KF_right, center_right);

	VideoCapture cap(0);
	// VideoCapture cap(filename);

	Mat kf_img(HEIGHT - top_cut - bot_cut, WIDTH, CV_8UC3);
	vector<Point> mousev_left,kalmanv_left;
	mousev_left.clear();
	kalmanv_left.clear();
	vector<Point> mousev_right,kalmanv_right;
	mousev_right.clear();
	kalmanv_right.clear();

	int counter = 0;
	int nb_confirmed = 0;
	int nb_total = 0;
	double average_left = 0;
	double average_right = 0;
	double error_left = 0;
	double error_right = 0;
	double prev_dist = norm(center_left - center_right);
	double new_dist = prev_dist;
	bool left_valid = false;
	bool right_valid = true;

	// splash screen: wait for a key before starting the loop
	Mat temp = Mat::zeros(100,400, CV_8UC3);
	putText(temp, "Press any key to start", Point(50,50), FONT_HERSHEY_SIMPLEX, .5, Scalar(255,255,255));
	putText(temp, "and ESC to end", Point(50, 75), FONT_HERSHEY_SIMPLEX, .5, Scalar(255,255,255));
	imshow("Blobs Image", temp);
	waitKey(-1);

	// NOTE(review): `key` is read at the bottom of the loop but is only
	// assigned on frames that are non-empty — if the very first frame is
	// empty, an uninitialized value is read. Consider `int key = 0;`.
	int key;
	bool eof = false;

	for(;;){
		Mat frame;

		// Kalman predict/correct step for the left hand
		Mat prediction_left = KF_left.predict();
		Point new_left(prediction_left.at<float>(0), prediction_left.at<float>(1));
		measurement_left(0) = center_left.x;
		measurement_left(1) = center_left.y;
		Mat estimated_left = KF_left.correct(measurement_left);
		Point statePt_left(estimated_left.at<float>(0),estimated_left.at<float>(1));
		Point measPt_left(measurement_left(0),measurement_left(1));

		// Kalman predict/correct step for the right hand
		Mat prediction_right = KF_right.predict();
		Point new_right(prediction_right.at<float>(0), prediction_right.at<float>(1));
		measurement_right(0) = center_right.x;
		measurement_right(1) = center_right.y;
		Mat estimated_right = KF_right.correct(measurement_right);
		Point statePt_right(estimated_right.at<float>(0),estimated_right.at<float>(1));
		Point measPt_right(measurement_right(0),measurement_right(1));

		ticks ++;
		// running average of the estimation error for each hand
		error_left = norm(statePt_left - measPt_left);
		average_left = ((average_left * (ticks - 1)) + error_left) / ticks;
		error_right = norm(statePt_right - measPt_right);
		average_right = ((average_right * (ticks - 1)) + error_right) / ticks;

		imshow("kalman", kf_img);
		// waitKey(-1);
		kf_img = Scalar::all(0);
		mousev_left.push_back(measPt_left);
		kalmanv_left.push_back(statePt_left);
		circle(kf_img, statePt_left, 1, Scalar(255,255,255), -1);
		circle(kf_img, measPt_left, 1, Scalar(0,0,255), -1);

		// NOTE(review): all four trace counters are derived from
		// mousev_left.size(); the *_right ones presumably should use the
		// right-hand vectors (same length in practice, so this works).
		int nb_mousev_left = mousev_left.size() - 1;
		int nb_kalmanv_left = mousev_left.size() - 1;
		int nb_mousev_right = mousev_left.size() - 1;
		int nb_kalmanv_right = mousev_left.size() - 1;

		// draw the last `trace` positions of measured/estimated left hand
		for(int i = max(0, nb_mousev_left - trace); i< nb_mousev_left; i++){
			line(kf_img, mousev_left[i], mousev_left[i+1], Scalar(255,255,0), 1);
		}
		for(int i = max(0, nb_kalmanv_left - trace); i< nb_kalmanv_left; i++){
			line(kf_img, kalmanv_left[i], kalmanv_left[i+1], Scalar(0,0,255), 1);
		}
		mousev_right.push_back(measPt_right);
		kalmanv_right.push_back(statePt_right);
		circle(kf_img, statePt_right, 1, Scalar(255,255,255), -1);
		circle(kf_img, measPt_right, 1, Scalar(0,0,255), -1);
		// draw the last `trace` positions of measured/estimated right hand
		for(int i = max(0, nb_mousev_right - trace); i< nb_mousev_right; i++){
			line(kf_img, mousev_right[i], mousev_right[i+1], Scalar(0,255,0), 1);
		}
		for(int i = max(0, nb_kalmanv_right - trace); i< nb_kalmanv_right; i++){
			line(kf_img, kalmanv_right[i], kalmanv_right[i+1], Scalar(255,0,255), 1);
		}

		// region of interest: the frame minus the top/bottom cut bands
		Rect border(0, top_cut, WIDTH, max(10, HEIGHT - top_cut - bot_cut));
		cap >> frame;
		if(!frame.empty()){
			Mat image;
			int flip_type = 1;
			switch (flip) {
				case 0: break;
				case 1: break;
				case 2: flip_type = 0; break;
				case 3: flip_type = -1; break;
			}
			if(flip) cv::flip(frame, frame, flip_type);
			resize(frame, frame, Size(WIDTH, HEIGHT));
			image = frame(border);
			imshow("frame", image);

			//performs the skin detection (hue band in HSV space)
			Mat converted_skin;
			cvtColor(image, converted_skin, CV_BGR2HSV);
			Mat skin_masked;
			inRange(converted_skin, Scalar(min(lowH, highH), 48, 80),Scalar(max(lowH, highH), 255, 255), skin_masked);
			imshow("test", skin_masked);

			//performs the robot detection (bright pixels + specular lights)
			Mat converted_white, white_masked, lights_masked;
			cvtColor(image, converted_white, CV_BGR2GRAY);
			inRange(converted_skin, Scalar(0, 0, 245), Scalar(180, 255, 255), lights_masked);
			threshold(converted_white, white_masked, lowV, 255, type);
			bitwise_or(white_masked, lights_masked, white_masked);
			imshow("white", white_masked);

			Mat copy(converted_skin.size(), converted_skin.type());// = converted.clone();

			//detects hands as blobs
			// NOTE(review): the IplImage-based CBlobResult is immediately
			// overwritten by the Mat-based one; the first call is dead work.
			CBlobResult blobs;
			IplImage temp = (IplImage)skin_masked;
			blobs = CBlobResult(&temp,NULL,1);
			blobs = CBlobResult(skin_masked,Mat(),NUMCORES);
			int numBlobs = blobs.GetNumBlobs();
			if(0 == numBlobs){
				cout << "can't find blobs!" << endl;
				continue;
			}

			// detects robot as a blob (same double-construction pattern)
			CBlobResult robot_blobs;
			IplImage robot_temp = (IplImage) white_masked;
			robot_blobs = CBlobResult(&robot_temp, NULL, 1);
			robot_blobs = CBlobResult(white_masked, Mat(), NUMCORES);
			if(0 == robot_blobs.GetNumBlobs()){
				cout << "can't find robot_blobs!" << endl;
				continue;
			}

			CBlob *curblob;
			CBlob* blob_1;
			CBlob* blob_2;
			CBlob* leftBlob;
			CBlob* rightBlob;
			CBlob* robotBlob;
			copy.setTo(Vec3b(0,0,0));

			// chooses the two largest blobs for the hands
			Point center_1, center_2;
			int max_1 = 0;
			int max_2 = 0;
			int maxArea_1 = 0;
			int maxArea_2 = 0;
			for(int i=0;i<numBlobs;i++){
				int area = blobs.GetBlob(i)->Area();
				if(area > maxArea_1){
					maxArea_2 = maxArea_1;
					maxArea_1 = area;
					max_2 = max_1;
					max_1 = i;
				}
				else if(area > maxArea_2){
					maxArea_2 = area;
					max_2 = i;
				}
			}
			int i_1 = max_1;
			int i_2 = max_2;
			double area_left, area_right;
			Rect rect_1;
			Rect rect_2;

			//determines which hand is left/right by x-coordinate of centers
			blob_1 = blobs.GetBlob(i_1);
			blob_2 = blobs.GetBlob(i_2);
			center_1 = blob_1->getCenter();
			center_2 = blob_2->getCenter();
			bool left_is_1 = (center_1.x < center_2.x)? true : false;
			leftBlob = (left_is_1)? blob_1 : blob_2;
			rightBlob = (left_is_1)? blob_2 : blob_1;
			center_left = leftBlob->getCenter();
			center_right = rightBlob->getCenter();

			//determine the number of valid hands
			//validity is decided by whether or not the hand followed a logical movement,
			//and if the area of the blob is large enough to be accepted
			int valids = 0;
			rect_1 = leftBlob->GetBoundingBox();
			rectangle(copy, rect_1.tl(), rect_1.br(), leftColor_2, 5);
			error_left = norm(statePt_left - center_left);
			area_left = leftBlob->Area();
			left_valid = error_left < sensitivity && area_left > area;
			if(left_valid){
				leftBlob->FillBlob(copy,leftColor, true);
				valids ++;
			}
			circle(copy, center_left, 5, leftColor_2, -1);

			rect_2 = rightBlob->GetBoundingBox();
			rectangle(copy, rect_2.tl(), rect_2.br(), rightColor_2, 5);
			error_right = norm(statePt_right - center_right);
			area_right = rightBlob->Area();
			right_valid = error_right < sensitivity && area_right > area;
			if(right_valid){
				rightBlob->FillBlob(copy,rightColor, true);
				valids ++;
			}
			circle(copy, center_right, 5, rightColor_2, -1);

			//finds the blob representing the robot
			//we could add a restriction to only choose a blob between the two hands
			//in terms of x-coordinate
			//a Kalman check can easily be done for the robot
			Point robot_center;
			maxArea_1 = 0;
			max_1 = 0;
			numBlobs = robot_blobs.GetNumBlobs();
			if(0 < numBlobs){
				// largest white blob within `len` pixels of either hand
				for(int i=0;i<numBlobs;i++){
					curblob = robot_blobs.GetBlob(i);
					robot_center = curblob->getCenter();
					double dist_1 = norm(center_1 - robot_center);
					double dist_2 = norm(center_2 - robot_center);
					if(dist_1 < len || dist_2 < len){
						double area = robot_blobs.GetBlob(i)->Area();
						if(area > maxArea_1){
							max_1 = i;
							maxArea_1 = area;
						}
					}
				}
				int i_3 = max_1;
				curblob = robot_blobs.GetBlob(i_3);
				curblob->FillBlob(copy,robotColor, true);
				robot_center = curblob->getCenter();
				circle(copy, robot_center, 5, robotColor_2, -1);
				Rect rect_3 = curblob->GetBoundingBox();
				rectangle(copy, rect_3.tl(), rect_3.br(), robotColor_2, 5);

				// determines which hand is controlling the robot
				// by cheching the position of the 3 blobs
				// an additional check could be done by verifying if
				//the center of the robot is moving in the same direction
				//as the center of the hand moving it
				bool is_left = false;
				bool is_right = false;
				bool confirmed = false;
				double dist_left = norm(center_left - robot_center);
				double dist_right = norm(center_right - robot_center);
				double dist_both = norm(center_left - center_right);
				Point robot_tl = rect_3.tl();
				Point robot_br = rect_3.br();
				// count how many of the robot's key points fall inside each
				// hand's bounding box (0..3 each side)
				int left_count = 0;
				int right_count = 0;
				if(rect_1.contains(robot_tl)) left_count++;
				if(rect_1.contains(robot_br)) left_count++;
				if(rect_1.contains(robot_center)) left_count++;
				if(rect_2.contains(robot_tl)) right_count++;
				if(rect_2.contains(robot_br)) right_count++;
				if(rect_2.contains(robot_center)) right_count++;

				switch(valids){
					case 0: break;
					case 1:{
						int area_sum = area_left + area_right;
						if(dist_left > 2* dist_right || dist_right > 2*dist_left){
							if(area_sum > 2 * area && (area_left > 2*area_right || area_right > 2*area_left) && ((left_valid && left_count > 0)||(right_valid && right_count > 0))){
								is_left = true;
								is_right = true;
								if(left_count > 2 || right_count > 2) confirmed = true;
							}
						}
						if(left_valid && left_count > 1) {
							is_left = true;
							if(left_count > 2) confirmed = true;
						}
						if(right_valid && right_count > 1) {
							is_right = true;
							if(right_count > 2) confirmed = true;
						}
						//if just one hand is on screen
						if(area_right < area/2){
							if(center_left.x > robot_center.x){
								is_left = true;
							}
							else{
								is_right = true;
							}
						}
						else if (area_left < area/2){
							if(center_right.x < robot_center.x){
								is_right = true;
							}
							else{
								is_left = true;
							}
						}
						break;}
					case 2:{
						int moreLeft = left_count - right_count;
						int moreRight = right_count - left_count;
						int countSum = left_count + right_count;
						// NOTE(review): cases 3, 2 and 1 below have no
						// `break` and fall through into each other — this
						// looks unintended; confirm before changing, since
						// the fallthrough affects which flags get set.
						switch (countSum) {
							case 3:{
								switch (left_count) {
									case 3: is_left = true; confirmed = true; break;
									case 2:
									case 1: is_left = true; is_right = true; confirmed= true; break;
									case 0: is_right = true; confirmed = true; break;
								}
							}
							case 2:{
								switch (left_count) {
									case 2: is_left = true; confirmed = true; break;
									case 1: is_left = true; is_right = true; break;
									case 0: is_right = true; confirmed = true; break;
								}
							}
							case 1:{
								switch (left_count) {
									case 1: is_left = true; break;
									case 0: is_right = true; break;
								}
							}
							case 0:{ break; }
						}
						break;}
				}

				// pixel-scan fallback: look for hand-fill colors inside the
				// robot's bounding box (blue channel vs green channel)
				bool found = false;
				for(size_t i = robot_tl.x; i<= robot_br.x && !found; i++){
					for(size_t j = robot_tl.y; j<= robot_br.y && !found; j++){
						int color1 = 0;
						int color2 = 255;
						Vec3b colour = copy.at<Vec3b>(Point(i, j));
						if(colour[1] == color1 && colour[0] == color2){
							found = true;
							is_left = true;
						}
						if(colour[1] == color2 && colour[0] == color1){
							found = true;
							is_right = true;
						}
					}
				}
				if (found) confirmed = true;

				if(!is_left && !is_right){
					cout << "-- none!";
					if(left_count == 0 && right_count == 0) confirmed = true;
				}
				else if(is_left && is_right){
					cout << "-- both!";
				}
				else {
					if (is_left){
						cout << " -- left!";
					}
					else {
						cout << " -- right!";
					}
				}
				imshow("kalman", kf_img);
				// up till here
				if(confirmed){
					nb_confirmed ++;
					cout << " -- confirmed" << endl;
				} else {
					cout << endl;
				}
				// log this frame's decision
				csv(&logs, center_left.x, center_left.y, is_left, center_right.x, center_right.y, is_right, confirmed);
			}
			nb_total ++;

			//displayOverlay("Blobs Image","Multi Thread");
			new_dist = norm(center_left - center_right);
			// don't throw errors in the first 10 frames
			if(ticks > 10){
				if(error_left > 20 && error_right > 20 /*&& new_dist < prev_dist*/){
					circle(copy, Point(WIDTH/2, HEIGHT/2), 100, Scalar(0, 0, 255), 30);
					nb_errors ++;
				}
			}
			prev_dist = new_dist;
			imshow("Blobs Image",copy);
			key = waitKey(10);
		}
		else{
			eof = true;
		}
		// ESC (27 plain, 1048603 with modifier bits) or end of video: report and quit
		if(27 == key || 1048603 == key || eof){
			double kalman_error_percentage = (nb_errors*100.0)/ticks;
			double confirm_percentage = (nb_confirmed*100.0/nb_total);
			// NOTE(review): "\%" is not a standard escape sequence in C++;
			// most compilers emit a plain '%' with a warning.
			stats << "kalman error frequency: " << kalman_error_percentage << "\%" << endl;
			stats << "confirmed: " << confirm_percentage << "\%" << endl;
			logs.close();
			stats.close();
			return 0;
		}
	}
}
/**
 * Detects red and green markers in a pair of binary threshold images, pairs
 * nearby red/green centers into candidate robots (checked against the
 * expected marker distance), draws the detections onto a new 3-channel
 * image, and refreshes the global robot lists.
 *
 * @param imgThreshRed   binary mask of red-marker pixels.
 * @param imgThreshGreen binary mask of green-marker pixels.
 * @return newly allocated annotated image; caller owns and must release it.
 *
 * NOTE(review): `areaBlob`, `potRobList`, `avRobList` and `robMax` are
 * file/global-scope definitions not visible in this chunk.
 */
IplImage* blobDetection2(IplImage* imgThreshRed, IplImage* imgThreshGreen) {
	int i, j;
	// int areaBlob = 100;
	float distMark = 10;
	CBlobResult blobsRed, blobsGreen, whiteRedBlobs, whiteGreenBlobs;
	CBlob *currentBlob;
	double px, py;

	// Create the output image (3 channels, same size as the input masks)
	IplImage* displayedImage = cvCreateImage(cvGetSize(imgThreshRed), IPL_DEPTH_8U, 3);

	// find all the RED related blobs in the image
	blobsRed = CBlobResult(imgThreshRed, NULL, 0);
	// find all the GREEN related blobs in the image
	blobsGreen = CBlobResult(imgThreshGreen, NULL, 0);

	// keep only blobs with an area of at least one pixel
	// (B_EXCLUDE + B_LESS 1.0 drops empty blobs)
	blobsRed.Filter(whiteRedBlobs, B_EXCLUDE, CBlobGetArea(), B_LESS, 1.0);
	blobsGreen.Filter(whiteGreenBlobs, B_EXCLUDE, CBlobGetArea(), B_LESS, 1.0);

#ifdef DEBUG_PRINT
	// BUG FIX: referenced an undeclared `whiteBlobs`, which broke the build
	// whenever DEBUG_PRINT was defined.
	printf("White Blobs: %d\n", whiteRedBlobs.GetNumBlobs());
#endif

	// display filtered blobs (red mask replicated into all three channels)
	cvMerge(imgThreshRed, imgThreshRed, imgThreshRed, NULL, displayedImage);

	// RED: compute each blob's bounding-box center; draw the big ones
	CvPoint2D32f redCenter[whiteRedBlobs.GetNumBlobs()];
	for (i = 0; i < whiteRedBlobs.GetNumBlobs(); i++) {
		currentBlob = whiteRedBlobs.GetBlob(i);
		px = (currentBlob->MaxX() + currentBlob->MinX()) / 2.0;
		py = (currentBlob->MaxY() + currentBlob->MinY()) / 2.0;
		redCenter[i] = cvPoint2D32f(px, py);
#ifdef DEBUG_PRINT
		printf("%2.2f\t%2.2f\n", px, py);
#endif
		if (currentBlob->Area() > areaBlob) {
			// Add cross to the image
			currentBlob->FillBlob(displayedImage, CV_RGB(255, 0, 0));
			cvCircle(displayedImage, cvPointFrom32f(redCenter[i]), 2, cvScalar(255, 0, 0), 10, 8, 0);
		}
	}

	// GREEN: same procedure for the green markers
	CvPoint2D32f greenCenter[whiteGreenBlobs.GetNumBlobs()];
	for (i = 0; i < whiteGreenBlobs.GetNumBlobs(); i++) {
		currentBlob = whiteGreenBlobs.GetBlob(i);
		px = (currentBlob->MaxX() + currentBlob->MinX()) / 2.0;
		py = (currentBlob->MaxY() + currentBlob->MinY()) / 2.0;
		greenCenter[i] = cvPoint2D32f(px, py);
#ifdef DEBUG_PRINT
		printf("%2.2f\t%2.2f\n", px, py);
#endif
		if (currentBlob->Area() > areaBlob) {
			// NOTE(review): green blobs are filled with CV_RGB(255,0,0)
			// (red), matching the original; CV_RGB(0,255,0) was probably
			// intended — confirm before changing the rendering.
			currentBlob->FillBlob(displayedImage, CV_RGB(255, 0, 0));
			cvCircle(displayedImage, cvPointFrom32f(greenCenter[i]), 2, cvScalar(0, 255, 0), 10, 8, 0);
		}
	}

	// Populating the list of potential robots
	potRobList.robNum = 0;
	for (i = 0; i < robMax; i++)
		potRobList.robList[i].active = 0;

	// track which red/green centers are already paired into a robot
	int redUsage[whiteRedBlobs.GetNumBlobs()];
	int greenUsage[whiteGreenBlobs.GetNumBlobs()];
	for (i = 0; i < whiteRedBlobs.GetNumBlobs(); i++)
		redUsage[i] = 0;
	for (j = 0; j < whiteGreenBlobs.GetNumBlobs(); j++)
		greenUsage[j] = 0;

	// Detect robots: pair each large red blob with an unused large green
	// blob whose center distance matches the expected marker distance
	float distCenter[whiteRedBlobs.GetNumBlobs()][whiteGreenBlobs.GetNumBlobs()];
	for (i = 0; i < min(whiteRedBlobs.GetNumBlobs(), robMax); i++) {
		currentBlob = whiteRedBlobs.GetBlob(i);
		if (currentBlob->Area() > areaBlob) {
			for (j = 0; j < min(whiteGreenBlobs.GetNumBlobs(), robMax); j++) {
				currentBlob = whiteGreenBlobs.GetBlob(j);
				if (currentBlob->Area() > areaBlob) {
					distCenter[i][j] = computeDist(redCenter[i], greenCenter[j]);
					// Print a connection line if this could be a robot
					if (redUsage[i] == 0 && greenUsage[j] == 0 && checkDistMarker(distCenter[i][j], distMark)) {
						cvLine(displayedImage, cvPointFrom32f(redCenter[i]), cvPointFrom32f(greenCenter[j]), cvScalar(0, 255, 255), 2, 8, 0);
						// Register the candidate robot and mark both markers used
						potRobList.robList[potRobList.robNum] = createRobot(redCenter[i], greenCenter[j]);
						potRobList.robNum++;
						redUsage[i] = 1;
						greenUsage[j] = 1;
						// draw the candidate's oriented bounding ellipse
						CvBox2D tmp;
						tmp.angle = potRobList.robList[potRobList.robNum - 1].orientation;
						tmp.center = potRobList.robList[potRobList.robNum - 1].center;
						tmp.size = cvSize2D32f(30, 50);
						cvEllipseBox(displayedImage, tmp, cvScalar(255, 255, 0), 4, 3, 0);
					}
				}
			}
		}
	}

	// Matching the list of potential robots with the previous list of robots
	updateRobotList(&avRobList, potRobList);
	makelistRobot();

	/* Control Law */
	return displayedImage;
}
/**
 * Finds the biggest blob (above m_minBlobSize) in `img`, renders that blob
 * alone into `output` — blob pixels set to `color`, everything else 0 —
 * resized to output's dimensions.
 *
 * @param img    single-channel input image to search.
 * @param color  pixel value written for blob pixels in the output.
 * @param output preallocated single-channel destination image.
 * @return true if a qualifying blob was found and rendered, false otherwise.
 */
bool findBiggestBlobImage(IplImage* img, int color, IplImage* &output)
{
	CBlobResult blobs;
	CBlob *currentBlob;
	blobs = CBlobResult( img, NULL, 0 );
	blobs.Filter( blobs, B_EXCLUDE, CBlobGetArea(), B_LESS, m_minBlobSize );

	// locate the largest remaining blob
	double biggestArea = m_minBlobSize;
	int biggestBlob = -1;
	for (int i = 0; i < blobs.GetNumBlobs(); i++ )
	{
		currentBlob = blobs.GetBlob(i);
		double blobArea = currentBlob->Area();
		if(blobArea > biggestArea)
		{
			biggestBlob = i;
			biggestArea = blobArea;
		}
	}
	if(biggestBlob < 0)
		return false;

	// bounding box of the winning blob
	int x = (int) blobs.GetBlob(biggestBlob)->MinX();
	int y = (int) blobs.GetBlob(biggestBlob)->MinY();
	int width = (int) blobs.GetBlob(biggestBlob)->MaxX() - x;
	int height = (int) blobs.GetBlob(biggestBlob)->MaxY() - y;

	IplImage* temp = cvCreateImage(cvGetSize(img), IPL_DEPTH_8U, 1);
	IplImage* temp2 = cvCreateImage(cvSize(width, height), IPL_DEPTH_8U, 1);
	IplImage* result = cvCreateImage(cvSize(width, height), IPL_DEPTH_8U, 1);

	// BUG FIX: cvCreateImage() does NOT initialize pixel data. Without
	// zeroing, pixels inside the bounding box but outside the blob hold
	// garbage that can compare equal to 255 in the recolor loop below.
	cvZero(temp);

	// paint the blob white into the full-size mask
	// (the redundant `if(biggestBlob>=0)` guard was removed — it is always
	// true on this path)
	blobs.GetBlob(biggestBlob)->FillBlob(temp, cvScalar(255), x, y);

	// crop the bounding box out of the full-size mask
	cvSetImageROI(temp, cvRect(x, y, width, height));
	cvCopy(temp, temp2);

	// recolor: blob pixels -> `color`, everything else -> 0
	// NOTE(review): indexing imageData with width*height assumes
	// widthStep == width (no row padding) — matches the original code,
	// but rows are 4-byte aligned in IplImage; confirm widths used here.
	uchar* tempData = (uchar *)(temp2->imageData);
	uchar* resultData = (uchar *)(result->imageData);
	for (int j = 0; j < width*height; j++)
	{
		resultData[j] = (tempData[j] == 255) ? color : 0;
	}

	cvResize(result, output);
	cvReleaseImage(&temp);
	cvReleaseImage(&temp2);
	cvReleaseImage(&result);
	return true;
}