// Render the Voronoi diagram held by `subdiv` onto `img`: each facet is
// filled with a random colour, outlined in black, and its site marked with
// a filled dot.
static void paint_voronoi( Mat& img, Subdiv2D& subdiv )
{
    vector<vector<Point2f> > facetList;
    vector<Point2f> siteCenters;
    subdiv.getVoronoiFacetList(vector<int>(), facetList, siteCenters);

    vector<Point> polygon;
    vector<vector<Point> > outline(1);

    for( size_t f = 0; f < facetList.size(); f++ )
    {
        // Round the floating-point facet vertices to integer pixel coords.
        polygon.resize(facetList[f].size());
        for( size_t v = 0; v < facetList[f].size(); v++ )
            polygon[v] = facetList[f][v];

        // Random BGR colour (alpha stays 0); rand() call order unchanged.
        Scalar color;
        color[0] = rand() & 255;
        color[1] = rand() & 255;
        color[2] = rand() & 255;
        fillConvexPoly(img, polygon, color, 8, 0);

        // Black anti-aliased border plus a dot at the facet's generating site.
        outline[0] = polygon;
        polylines(img, outline, true, Scalar(), 1, LINE_AA, 0);
        circle(img, siteCenters[f], 3, Scalar(), FILLED, LINE_AA, 0);
    }
}
// Draw every detected square onto `image` as a closed green polyline.
void SquareOcl::draw_squares( Mat& image, const vector<vector<Point> >& squares )
{
    for( size_t idx = 0; idx < squares.size(); idx++ )
    {
        const vector<Point>& square = squares[idx];
        const Point* vertices = &square[0];
        int vertexCount = (int)square.size();
        // One closed contour, 3 px thick, anti-aliased.
        polylines(image, &vertices, &vertexCount, 1, true, Scalar(0,255,0), 3, CV_AA);
    }
}
/* Stroke a quadratic Bezier curve: flatten it into up to N_SEG line
 * segments and draw them with the pen. Returns false when the pen has no
 * stroke (nothing to draw), otherwise the result of polylines().
 */
static bool quadratic(nsfb_t *nsfb, nsfb_bbox_t *curve, nsfb_point_t *ctrla, nsfb_plot_pen_t *pen)
{
    nsfb_point_t segments[N_SEG];

    if (pen->stroke_type != NFSB_PLOT_OPTYPE_NONE) {
        /* quadratic_points() fills `segments` and returns the vertex count. */
        return polylines(nsfb, quadratic_points(N_SEG, segments, curve, ctrla), segments, pen);
    }

    return false;
}
int main() { int gd=DETECT,gm,color,fill_color,boundary_color; int xco,yco; DWORD dwWidth = GetSystemMetrics(SM_CXSCREEN); DWORD dwHeight = GetSystemMetrics(SM_CYSCREEN); initwindow(dwWidth,dwHeight); int x0=dwWidth/2; int y0=dwHeight/2; int i,j,x[50],y[50]; printf("%d\t%d\n\n",x0,y0); for(i=0; i<dwHeight; i++) putpixel(x0,i,YELLOW); for(j=0; j<dwWidth; j++) putpixel(j,y0,YELLOW); int n; int h; boundary_color=15; printf("Enter the number of sides or edges\n"); scanf("%d",&n); for(i=0; i<n; i++) { printf("Enter the %dth coordinate\n",i+1); scanf("%d",&x[i]); scanf("%d",&y[i]); } x[n]=x[0]; y[n]=y[0]; for(i=0; i<n; i++) printf("%d\t%d\n",x[i],y[i]); while(z!=(n)) { polylines(x[z],y[z],x[z+1],y[z+1],x0,y0); z++; } getch(); cleardevice(); closegraph(); }
// Draw the (Douglas-Peucker simplified) gesture path of a TUIO cursor into
// `out`, scaled uniformly to fit an ~80x80 box with a 10 px margin.
void tuiomultiplexer::drawGest(cv::Mat &out, TUIO::TuioCursor *tcur)
{
    double minX = 1000000.0, minY = 1000000.0;
    double maxX = -1000000.0, maxY = -1000000.0;

    list<TuioPoint> path = tcur->getPath();
    vector<Point2f> pathCV;
    vector<TuioPoint> pathToFix, pathFixed;

    for (list<TuioPoint>::iterator it = path.begin(); it != path.end(); it++)
        pathToFix.push_back(*it);

    // Simplify the raw path before drawing.
    pathFixed = gu.DouglasPeucker(pathToFix, ui->minDistance->value(), 0, pathToFix.size() - 1);

    // Collect the simplified points while tracking their bounding box.
    for (vector<TuioPoint>::iterator it = pathFixed.begin(); it != pathFixed.end(); it++) {
        double x = (*it).getX();
        double y = (*it).getY();
        pathCV.push_back(Point2f(x, y));
        minX = minX < x ? minX : x;
        minY = minY < y ? minY : y;
        maxX = maxX > x ? maxX : x;
        maxY = maxY > y ? maxY : y;
    }

    // Uniform scale so the larger dimension fits into 80 px.
    double scaleX = 80.0 / (maxX - minX);
    double scaleY = 80.0 / (maxY - minY);
    double scaleG = scaleX > scaleY ? scaleY : scaleX;

    // BUG FIX: the original allocated `ge` and `num` with new[] and released
    // them with plain `delete` (undefined behaviour). A std::vector and a
    // stack array own the storage safely instead.
    vector<Point> ge(pathCV.size());
    int counter = 0;
    for (vector<Point2f>::iterator it = pathCV.begin(); it != pathCV.end(); it++) {
        ge[counter].x = (int)(((*it).x - minX) * scaleG) + 10;
        ge[counter].y = (int)(((*it).y - minY) * scaleG) + 10;
        counter++;
    }

    if (ge.empty())
        return; // nothing to draw (original would index an empty array)

    // NOTE(review): draws size()-1 points, dropping the last vertex —
    // behaviour preserved from the original; confirm whether intentional.
    int num[1] = { (int)pathCV.size() - 1 };
    const Point* ptt[1] = { &ge[0] };
    polylines(out, ptt, num, 1, 0, cvScalar(255), 3);
}
int main() { int gd=DETECT,gm; DWORD dwWidth = GetSystemMetrics(SM_CXSCREEN); DWORD dwHeight = GetSystemMetrics(SM_CYSCREEN); initwindow(dwWidth,dwHeight); int x0=dwWidth/2; int y0=dwHeight/2; int i,j,x[50],y[50]; for(i=0; i<dwHeight; i++) putpixel(x0,i,RED); for(j=0; j<dwWidth; j++) putpixel(j,y0,RED); int n; printf("Enter the number of coordinates or vertices\n"); scanf("%d",&n); for(i=0;i<n;i++) { printf("Enter the %dth coordinate\n",i+1); scanf("%d",&x[i]); scanf("%d",&y[i]); } for(i=0;i<n;i++) printf("%d\t%d\n",x[i],y[i]); while(z!=(n-1)) { polylines(x[z],y[z],x[z+1],y[z+1],x0,y0); z++; } delay(40000); cleardevice(); closegraph(); }
void MainWindow::mouseHandler(int event, int x, int y, int flags, void* param) { MainWindow* window = (MainWindow*) param; if (window->mode == 3) { x = bound(x, window->R, window->_src.cols-1-window->R); y = bound(y, window->R, window->_src.rows-1-window->R); } else { x = bound(x, 0, window->_src.cols-1); y = bound(y, 0, window->_src.rows-1); } // user press left button if (event == CV_EVENT_LBUTTONDOWN && !window->drag) { if (window->mode == 1) window->point = Point(x, y); else if (window->mode == 2) { window->points.clear(); window->points.push_back(Point(x, y)); } else if (window->mode == 3) { Rect box = Rect(x-window->R, y-window->R, 1+window->R*2, 1+window->R*2); Mat temp; medianBlur(window->_dst(box), temp, window->kernel); temp.copyTo(window->_dst(box), window->round_mask); window->round_mask.copyTo(window->zone(box), window->round_mask); imshow("Image", window->_dst); } window->drag = true; } // user drag the mouse if (event == CV_EVENT_MOUSEMOVE && window->drag) { Mat temp = window->_dst.clone(); if (window->mode == 1) rectangle(temp, window->point, Point(x, y), CV_RGB(255, 0, 0), 2, 8, 0); else if (window->mode == 2) { window->pts = (const Point*)Mat(window->points).data; window->npts = window->points.size(); window->points.push_back(Point(x, y)); polylines(temp, &window->pts, &window->npts, 1, false, Scalar(0,0,255), 2, 8, 0); } else if (window->mode == 3) { Rect box = Rect(x-window->R, y-window->R, 1+window->R*2, 1+window->R*2); Mat temp; medianBlur(window->_dst(box), temp, window->kernel); temp.copyTo(window->_dst(box), window->round_mask); window->round_mask.copyTo(window->zone(box), window->round_mask); imshow("Image", window->_dst); return; } imshow("Image", temp); } // user release left button if (event == CV_EVENT_LBUTTONUP && window->drag) { if (window->mode == 1) { Point temp(window->point); window->point.x = min(x, temp.x); x = max(x, temp.x); window->point.y = min(y, temp.y); y = max(y, temp.y); rectangle(window->zone, window->point, Point(x, 
y), Scalar(255), CV_FILLED, 8, 0); Rect box = Rect(window->point.x, window->point.y, x - window->point.x, y - window->point.y); if (window->style == 1) { medianBlur(window->_dst(box), window->_dst(box), window->kernel); medianBlur(window->_dst(box), window->_dst(box), window->kernel); } else { Mat mask = Mat::zeros(window->_src.size(), CV_8UC1); rectangle(mask, window->point, Point(x, y), CV_RGB(255, 255, 255), CV_FILLED, 8, 0); inpaint(window->_dst, mask, window->_dst, 5, INPAINT_TELEA); } } else if (window->mode == 2) { Rect box = boundingRect(window->points); vector< vector<Point> > contour; contour.push_back(window->points); Mat mask = Mat::zeros(window->_src.size(), CV_8UC1); fillPoly(mask, contour, Scalar(255)); fillPoly(window->zone, contour, Scalar(255)); if (window->style == 1) { Mat temp; medianBlur(window->_dst(box), temp, window->kernel); medianBlur(temp, temp, window->kernel); temp.copyTo(window->_dst(box), mask(box)); } else inpaint(window->_dst, mask, window->_dst, 5, INPAINT_TELEA); } imshow("Image", window->_dst); window->drag = false; } // user click right button: reset all if (event == CV_EVENT_RBUTTONUP) { window->_dst = window->_src.clone(); window->zone = Mat::zeros(window->_src.size(), CV_8UC1); window->drag = false; imshow("Image", window->_dst); } }
int main() { int gd=DETECT,gm,color,fill_color,boundary_color; int xco,yco; DWORD dwWidth = GetSystemMetrics(SM_CXSCREEN); DWORD dwHeight = GetSystemMetrics(SM_CYSCREEN); initwindow(dwWidth,dwHeight); int x0=dwWidth/2; int y0=dwHeight/2; int i,j,x[50],y[50]; printf("%d\t%d\n\n",x0,y0); for(i=0; i<dwHeight; i++) putpixel(x0,i,YELLOW); for(j=0; j<dwWidth; j++) putpixel(j,y0,YELLOW); int n; int h; boundary_color=15; printf("Enter the number of sides or edges\n"); scanf("%d",&n); for(i=0; i<n; i++) { printf("Enter the %dth coordinate\n",i+1); scanf("%d",&x[i]); scanf("%d",&y[i]); } x[n]=x[0]; y[n]=y[0]; for(i=0; i<n; i++) printf("%d\t%d\n",x[i],y[i]); while(z!=(n)) { polylines(x[z],y[z],x[z+1],y[z+1],x0,y0); z++; } int xin,yin; printf("Please enter an interior point of polygon\n"); scanf("%d",&xin); scanf("%d",&yin); printf("Please enter fill_color(except white=15)\n"); scanf("%d",&fill_color); int left,right; for(i=xin+x0;i<dwWidth;i++) { if(getpixel(i,y0-yin)==15) right++; } for(i=xin+x0;i>0;i--) { if(getpixel(i,y0-yin)==15) left++; } if(left%2==0 && right%2==0) printf("\npoint is outside\n"); else{ printf("point is inside\n"); //color=getpixel(xin+x0,y0-yin); fillPolygon(xin+x0,y0-yin,fill_color,boundary_color); getch(); cleardevice(); closegraph(); } }
void cameraSettings2d::hacerUpdate(){ ///ACA UPDATE DE TODO LO QUE SE NECESITA camUpdate(); if(isUpdated){ if(zoneSelection){ if(numSelectedPoints<4){ if(ui->glImage->isNewClick){ cConfig->zone[numSelectedPoints].x=ui->glImage->lastPos.x()*640/ui->glImage->width(); cConfig->zone[numSelectedPoints].y=ui->glImage->lastPos.y()*480/ui->glImage->height(); //cout << cConfig->zone[numSelectedPoints].x << "," << cConfig->zone[numSelectedPoints].y << endl; numSelectedPoints++; ui->glImage->isNewClick=false; ui->progressBar->setValue(numSelectedPoints*25); } } else{ numSelectedPoints=0; zoneSelection=false; cConfig->zoneSelected=true; ui->processLabel->setText("Idle process"); ui->progressBar->setValue(0); ui->Instructions->setText("Nothing to do..."); cConfig->maskZona.setTo(cv::Scalar::all(0)); fillConvexPoly(cConfig->maskZona,cConfig->zone,4,cvScalar(255)); Mat temp=cConfig->maskBack & cConfig->maskZona; ui->imgMaskBack->updateImage(false,temp); ui->imgMaskPlane->updateImage(false,cConfig->maskZona); } } if(mixImagesToColor()){ if(BackCalibration){ if(!cConfig->BM->proc(cConfig->variance,depthsS[nC],masks[nC],cConfig->Background,cConfig->maskBack)){ ui->progressBar->setValue(((float)cConfig->BM->nback/(cConfig->BM->total+1))*100); } else{ BackCalibration=false; ui->processLabel->setText("Idle process"); ui->progressBar->setValue(0); ui->Instructions->setText("Nothing to do..."); Mat temp0,temp1; cConfig->Background.convertTo(temp0,CV_8UC1,255); if(cConfig->zoneSelected) temp1=cConfig->maskBack & cConfig->maskZona; else temp1=cConfig->maskBack; ui->imgBack->updateImage(false,temp0); ui->imgMaskBack->updateImage(false,temp1); } } if(zoneSelection){ for(int cir=0;cir<numSelectedPoints;cir++) circle(mixs[nC],cConfig->zone[cir],10,Scalar(0,0,255),-1); } else if(cConfig->zoneSelected){ int *num=new int[1]; num[0]=4; const Point* ptt[1] = { cConfig->zone }; polylines(mixs[nC],ptt,num,1,1,cvScalar(255),2); delete num; } if(doPlaneCalculation){ 
cam->retrivePointCloud(nC,depths[nC],pCs[nC]); cConfig->VP->calcPlanoPC(pCs[nC],cConfig->maskZona,masks[nC],ui->progressBar); doPlaneCalculation=false; ui->processLabel->setText("Idle process"); ui->progressBar->setValue(0); ui->Instructions->setText("Nothing to do..."); } ui->glImage->updateImage(true,mixs[nC]); if(cConfig->method=="Background"){ sktP->processImage(depthsS[nC],toBlobs,masks[nC]); } else{ cam->retrivePointCloud(nC,depths[nC],pCs[nC]); sktP->processImage(pCs[nC],toBlobs,masks[nC]); } ui->glBlobs->updateImage(false,toBlobs); } if(refiningMask){ if(cConfig->method=="Background"){ cConfig->maskBack=(cConfig->maskBack==255) & (toBlobs==0); cConfig->maskZona=(cConfig->maskZona==255) & (toBlobs==0); ui->imgMaskBack->updateImage(false,cConfig->maskBack); } else{ cConfig->maskZona=(cConfig->maskZona==255) & (toBlobs==0); ui->imgMaskPlane->updateImage(false,cConfig->maskZona); } } } ////////////////////Procesamiento isUpdated=false; }
// SURF + mean-shift object tracker (OpenCV 2.x, nonfree module).
// Reads video1.avi, takes an initial polygon ROI (from points.txt),
// classifies SURF keypoints into foreground/background sets, then tracks
// the ROI frame by frame — re-clustering descriptors, estimating a
// homography for scale adaptation — logging statistics to result.txt and
// writing the annotated frames to ms_tracking.avi.
int main(int argc, char** argv)
{
    ofstream f1;
    f1.open("result.txt");
    size_t i,j;
    Point2f cp;
    cv::initModule_nonfree(); // register SURF (nonfree) before first use
    vector<Point2f> MP1,MP2;
    vector<int> trainIdxs, queryIdxs;
    //Read Video File
    VideoCapture cap("video1.avi");
    if( !cap.isOpened() ) { cout << "Could not initialize capturing...\n"; return 0;}
    VideoWriter writer("ms_tracking.avi",CV_FOURCC('D','I','V','3'), 10,cvSize(640,480),1);
    cv::SURF mySURF;
    mySURF.extended = 0; // 64-element descriptors
    Ptr<DescriptorMatcher> descriptorMatcher = DescriptorMatcher::create( "FlannBased" );
    int mactherFilterType = getMatcherFilterType( "CrossCheckFilter" );
    Mat frame,img1,img2;
    cap >> frame;
    if( frame.empty() ) return -1;
    img1 = frame.clone() ;
    Mat temp,temp1;
    if(img1.empty()) cout << "Exiting as the input image is empty" << endl;
    const char* name = "Initiate_ROI";
    box = cvRect(-1,-1,0,0);
    cvNamedWindow( name,1);
    cvSetMouseCallback( name, my_mouse_callback2);
    // Main loop: let the user draw the initial polygon with the mouse.
    while( 1 )
    {
        img1.copyTo(temp);
        if( drawing_poly)
        {
            for ( i=0; i < polyPoints.size(); i++)
                circle(temp, polyPoints[i], 2, Scalar(0,255,0), -1,8);
        }
        cv::imshow(name,temp) ;
        char c = (char)waitKey(10);
        if( c == '\x1b' ) // esc
            break;
        if(poly_drawn)
            break;
    }
    //Read the polygon points from a text file
    FILE *f11;
    polyPoints.clear();
    IpolyPoints.clear();
    f11 = fopen("points.txt","r"); // NOTE(review): fopen result is not checked
    Point a;
    for(int j=0;j<37;j++) // exactly 37 vertices expected in points.txt
    {
        fscanf(f11,"%d",&(a.x));
        fscanf(f11,"%d",&(a.y));
        polyPoints.push_back(a);
        IpolyPoints.push_back(a);
    }
    fclose(f11);
    // Drawing Polygon
    Point pointArr[polyPoints.size()];
    for (i=0; i< polyPoints.size(); i++)
        pointArr[i] = polyPoints[i];
    const Point* pointsArray[1] = {pointArr};
    int nCurvePts[1] = { polyPoints.size() };
    polylines(temp, pointsArray, nCurvePts, 1, 1, Scalar(0,255,0), 1);
    cout << polyPoints.size() << endl;
    box= boundingRect(polyPoints);
    //boxOrg = Rect(box.x-15, box.y-15, box.width+30, box.height+30);
    boxOuter = Rect(box.x-30, box.y-30, box.width+60, box.height+60);
    //box =boxOrg;
    // storing the initial selected Box, as "box" variable changes in consecutive matching
    boxP=box;
    Mat img1ROI, labels1, clusters1, descriptors,roidescriptors, descriptors1,bdescriptors, bmdescriptors;
    vector<int> reprojections; // number of reprojections per KP, size same as KP(incresing)
    vector<Point2f> points,points1,points2, Mpoints1,Mpoints2,bpoints,npoints1,npoints2; //bmpoints,tpoints;
    vector<KeyPoint> roikeypoints, bkeypoints,keypoints,keypoints1, keypoints2;
    draw_box(temp, box ); //Show InnerBox - This is used by the Mean-Shift Tracker
    draw_box(temp,boxOuter); //Show OuterBox - This is used for removing background points
    bpoints.clear();
    //calculating keypoints and descriptors of the selected polygon in image roi
    //==============================================================================================//
    for(i=0;i<polyPoints.size();i++)
    {
        // cout << polyPoints[i] << endl;
        // shift polygon vertices into boxOuter-local (ROI) coordinates
        polyPoints[i].x = polyPoints[i].x -boxOuter.x;
        polyPoints[i].y = polyPoints[i].y- boxOuter.y;
    }
    img1ROI = img1(boxOuter);
    points1.clear();
    mySURF.detect(img1ROI, roikeypoints);
    KeyPoint::convert(roikeypoints, points);
    mySURF.compute(img1ROI, roikeypoints, roidescriptors);
    bdescriptors.release();
    bkeypoints.clear();
    // presumably splits keypoints into a background set (b*)... — confirm
    bcategorizePoints( points, bpoints,polyPoints, roikeypoints, roidescriptors, bkeypoints, bdescriptors);
    shiftPoints(bpoints,boxOuter);
    for(i=0;i<bpoints.size();i++)
        circle(temp, bpoints[i], 2, Scalar(0,255,0),2);
    vector<KeyPoint> tpkeypoints;
    Mat tpdescriptors;
    // ...and a foreground/template set (tp*) — confirm against the helpers
    categorizePoints( points, points1,polyPoints, roikeypoints, roidescriptors, tpkeypoints, tpdescriptors);
    shiftPoints(points1, boxOuter);
    for(i=0;i<points1.size();i++)
        circle(temp, points1[i], 2, Scalar(0,0,255),2);
    //====================================================================================================//
    points1.clear();
    Mat img2ROI;
    // tpkeypoints = keypoints1; tpdescriptors = descriptors1;
    cv::imshow(name,temp) ;
    imwrite("a.jpg",temp);
    cout << "BD_SIZE \t" << bdescriptors.rows << "\t" << "FD_SIZE \t" << tpdescriptors.rows << endl;
    // Mat newimg = img1ROI.clone();
    // KeyPoint::convert(tpkeypoints, points1);
    // for(size_t i=0;i<points1.size();i++)
    //     circle(newimg, points1[i], 2, Scalar(255,0,255),2);
    // imshow( "newimg", newimg );
    // points1.clear();
    waitKey(0);
    cvDestroyWindow( name );
    int FG_mp, FG, BG_mp, BG, FG_BG, msI ; //Foreground matching points
    struct timeval t1, t2;
    // Per-frame tracking loop (exits on end of video or Esc).
    for(int l=0;;l++)
    {
        gettimeofday(&t1, NULL);
        // re-cluster the current foreground descriptors for mean-shift
        cv::kmeans(tpdescriptors, NOC, labels1, TermCriteria( CV_TERMCRIT_ITER + CV_TERMCRIT_EPS, 50, 1.0 ), 1, KMEANS_RANDOM_CENTERS, clusters1);
        cap >> frame;
        img2 = frame.clone() ;
        temp1 =frame.clone() ;
        if(img2.empty() )
        {
            cout<< "Could not open image: " << endl ;
            break;
        }
        int flag=1;
        Mpoints1.clear();
        Mat descriptors2;
        msI=0;
        meanShift(img1, img2, descriptorMatcher, mactherFilterType, tpkeypoints, tpdescriptors,keypoints2,descriptors2, clusters1, cp, flag, MP1,img2ROI,bkeypoints, bdescriptors, temp1,FG_mp, FG, BG_mp, BG, FG_BG,msI);
        //==========scaling=================
        float scale=1;
        // cout <<"MP1size \t" << MP1.size() <<endl;
        if(APPLY_SCALING)
        {
            vector<DMatch> filteredMatches;
            if(descriptors1.rows > 4 && descriptors2.rows > 4)
            {
                // match previous-frame descriptors against current frame
                crossCheckMatching( descriptorMatcher, descriptors1, descriptors2, filteredMatches, 1 );
                trainIdxs.clear();
                queryIdxs.clear();
                for( i = 0; i < filteredMatches.size(); i++ )
                {
                    queryIdxs.push_back(filteredMatches[i].queryIdx);
                    trainIdxs.push_back(filteredMatches[i].trainIdx);
                }
                points1.clear();
                points2.clear();
                KeyPoint::convert(keypoints1, points1, queryIdxs);
                KeyPoint::convert(keypoints2, points2, trainIdxs);
                // cout << "point2size" << points2.size() << endl;
                //homography
                npoints1.clear();npoints2.clear();
                Mpoints1.clear();Mpoints2.clear();
                Mat H12, points1t;
                double ransacReprojThreshold = 10;
                if( ransacReprojThreshold >= 0 && points1.size() > 4)
                    H12 = findHomography( Mat(points1), Mat(points2), CV_RANSAC, ransacReprojThreshold );
                vector<char> matchesMask( filteredMatches.size(), 0 );// NONmatchesMask( filteredMatches.size(), 0 );
                if( !H12.empty() )
                {
                    perspectiveTransform(Mat(points1), points1t, H12);
                    double maxInlierDist = 10;//ransacReprojThreshold < 0 ? 3 : ransacReprojThreshold;
                    // keep matches consistent with the homography (inliers)
                    for(i = 0; i < points1.size(); i++ )
                    {
                        if( norm(points2[i] - points1t.at<Point2f>((int)i,0)) <= 5)// maxInlierDist ) // inlier
                        {
                            matchesMask[i] = 1;
                            npoints2.push_back(points2[i]);
                            npoints1.push_back(points1[i]);
                        }
                    }
                    // keep only inliers that coincide with mean-shift matches (MP1)
                    for(i=0; i<npoints2.size();i++)
                    {
                        for(j=0;j<MP1.size();j++)
                        {
                            double dist = norm(npoints2[i]-MP1[j]);
                            // cout <<"dist \t" <<dist << endl;
                            // waitKey(0);
                            if(dist < 0.1)
                            {
                                Mpoints2.push_back(npoints2[i]);
                                Mpoints1.push_back(npoints1[i]);
                                break;
                            }
                        }
                    }
                }
                Mat drawImg;
                drawMatches( img1ROI, keypoints1, img2ROI, keypoints2, filteredMatches, drawImg, CV_RGB(0, 255, 0), CV_RGB(0, 0, 255), matchesMask
#if DRAW_RICH_KEYPOINTS_MODE
                , DrawMatchesFlags::DRAW_RICH_KEYPOINTS
#endif
                );
                imshow( "correspondance", drawImg );
                cout << "npoints1.size \t" << Mpoints1.size() << "\t" << Mpoints2.size() << endl;
                if(Mpoints1.size() > 8)
                    weightScalingAspect(Mpoints1,Mpoints2,&scale);
            }
        }
        // roll the frame state forward for the next iteration
        img1=img2;
        img1ROI = img2ROI;
        boxOrg =box;
        keypoints1 = keypoints2;
        descriptors1 =descriptors2;
        // rescale the tracking box about its centre, preserving aspect ratio
        box.x += box.width/2;
        box.y += box.height/2;
        box.height = round(boxOrg.height *scale);
        box.width = round(( float(boxOrg.width)/float(boxOrg.height) ) * box.height);
        box.x -= box.width/2;
        box.y -= box.height/2;
        boundaryCheckRect(box);
        cout <<"SCALE \t" << scale << endl;
        gettimeofday(&t2, NULL);
        double diff = (float)((t2.tv_sec * 1000000 + t2.tv_usec) - (t1.tv_sec * 1000000 + t1.tv_usec));
        diff = diff/1000;
        cout <<"Time taken in mili sec \t" << diff<< endl;
        // cout << tpdescriptors.rows << endl;
        //cout <<"BD \t" << bdescriptors.rows << endl;
        f1 << l << "\t" << FG_mp << "\t" << BG_mp << "\t" << FG << "\t"<< msI << "\n";
        cout << "l \t" << l << "\t" <<" msI \t"<< msI << endl;
        imshow("img2",temp1);
        writer << temp1;
        waitKey(0);
        // boxOrg = eBox;
        char c = (char)waitKey(10);
        if( c == '\x1b' ) // esc
        {
            cout << "Exiting ..." << endl;
            break;
        }
    }
    trajectory.close();
    return 0;
}
Mat ScreenDetector::getTransformationMatrix(Error& error) { bool approxFound = false; // convert image to HSV cvtColor(img, hsv, CV_BGR2HSV); // threshold the image inRange(hsv, hsvMin, hsvMax, thresholded); // Optimize threshold by reducing noise erode(thresholded, thresholded, getStructuringElement(MORPH_ELLIPSE, Size(erodeDilateSize, erodeDilateSize)) ); dilate( thresholded, thresholded, getStructuringElement(MORPH_ELLIPSE, Size(erodeDilateSize, erodeDilateSize)) ); dilate( thresholded, thresholded, getStructuringElement(MORPH_ELLIPSE, Size(erodeDilateSize, erodeDilateSize)) ); erode(thresholded, thresholded, getStructuringElement(MORPH_ELLIPSE, Size(erodeDilateSize, erodeDilateSize)) ); GaussianBlur(thresholded, thresholded, Size(3,3), 0); Mat forContours; thresholded.copyTo(forContours); // find all contours Contours contours; Contour approximatedScreen; findContours(forContours, contours, CV_RETR_LIST, CV_CHAIN_APPROX_SIMPLE); int nbContours = contours.size(); cout << nbContours << " contours found, debug: " << DEBUG << endl; if(nbContours == 0) { error.setError("Unable to find the screen", "The camera doesn't detect any screen or green element." "Please check if your screen is turned on and directed toward the screen"); return img; } sort(contours.begin(), contours.end(), contour_compare_area); // find the contour with the biggest area that have 4 points when approximated for(int i=0; i < nbContours; ++i) { approxPolyDP(contours.at(i), approximatedScreen, approximateEpsilon * arcLength(contours.at(i), true), true); // our screen has 4 point when approximated if(approximatedScreen.size() == 4) { approxFound = true; break; } } if(!approxFound) { error.setError("Unable to find the screen properly", "It seems that the screen is not fully detectable by the camera. 
Try to reduce light in your room"); return img; } if(DEBUG) { namedWindow("debug", WINDOW_KEEPRATIO); namedWindow("thresholded_calibration", WINDOW_KEEPRATIO); Mat debug = Mat::zeros(img.rows, img.cols, CV_8UC3); polylines(debug, approximatedScreen, true, Scalar(0,0,255), 3); imshow("debug", debug); imshow("thresholded_calibration", thresholded); } return transformImage(approximatedScreen); }
/* Plot a path made of move/line/quad/cubic operations with the given pen.
 * Curves are flattened into up to N_SEG line segments each; the resulting
 * vertex list is filled and/or stroked according to the pen.
 * Returns true on success, false on allocation failure.
 */
static bool path(nsfb_t *nsfb, int pathc, nsfb_plot_pathop_t *pathop, nsfb_plot_pen_t *pen)
{
    int path_loop;
    nsfb_point_t *pts;
    nsfb_point_t *curpt;
    int ptc = 0;
    nsfb_bbox_t curve;
    nsfb_point_t ctrla;
    nsfb_point_t ctrlb;
    int added_count = 0;
    int bpts;

    /* count the verticies in the path and add N_SEG extra for curves */
    for (path_loop = 0; path_loop < pathc; path_loop++) {
        ptc++;
        if ((pathop[path_loop].operation == NFSB_PLOT_PATHOP_QUAD) ||
            (pathop[path_loop].operation == NFSB_PLOT_PATHOP_CUBIC))
            ptc += N_SEG;
    }

    /* allocate storage for the vertexes */
    curpt = pts = malloc(ptc * sizeof(nsfb_point_t));
    if (pts == NULL)
        return false; /* BUG FIX: original dereferenced a failed malloc */

    for (path_loop = 0; path_loop < pathc; path_loop++) {
        switch (pathop[path_loop].operation) {
        case NFSB_PLOT_PATHOP_QUAD:
            /* the curve start and control point were already emitted as
             * plain vertices by the two preceding ops: rewind over them. */
            curpt -= 2;
            added_count -= 2;
            curve.x0 = pathop[path_loop - 2].point.x;
            curve.y0 = pathop[path_loop - 2].point.y;
            ctrla.x = pathop[path_loop - 1].point.x;
            ctrla.y = pathop[path_loop - 1].point.y;
            curve.x1 = pathop[path_loop].point.x;
            curve.y1 = pathop[path_loop].point.y;
            bpts = quadratic_points(N_SEG, curpt, &curve, &ctrla);
            curpt += bpts;
            added_count += bpts;
            break;

        case NFSB_PLOT_PATHOP_CUBIC:
            /* likewise rewind over the start and two control points */
            curpt -= 3;
            added_count -= 3;
            curve.x0 = pathop[path_loop - 3].point.x;
            curve.y0 = pathop[path_loop - 3].point.y;
            ctrla.x = pathop[path_loop - 2].point.x;
            ctrla.y = pathop[path_loop - 2].point.y;
            ctrlb.x = pathop[path_loop - 1].point.x;
            ctrlb.y = pathop[path_loop - 1].point.y;
            curve.x1 = pathop[path_loop].point.x;
            curve.y1 = pathop[path_loop].point.y;
            bpts = cubic_points(N_SEG, curpt, &curve, &ctrla, &ctrlb);
            curpt += bpts;
            added_count += bpts;
            break;

        default:
            /* move/line: copy the vertex straight through */
            *curpt = pathop[path_loop].point;
            curpt++;
            added_count++;
            break;
        }
    }

    if (pen->fill_type != NFSB_PLOT_OPTYPE_NONE) {
        polygon(nsfb, (int *)pts, added_count, pen->fill_colour);
    }

    if (pen->stroke_type != NFSB_PLOT_OPTYPE_NONE) {
        polylines(nsfb, added_count, pts, pen);
    }

    free(pts);

    return true;
}
void drawLastPoly(int npoly) { const Point* curves[1] = {curve[npoly]}; int curvesPts[] = {NPOINTS}; polylines(Frame, curves, curvesPts, 1, (bool)POLYCLOSED, Scalar(255,255,255), POLYSTYLE ); imshow(WINDOW,Frame); }