Example #1
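/*
 * Mirror a region horizontally about the vertical line x = Width
 * (used, for example, when mirroring window contents for RTL layouts).
 * Returns 1 on success, 0 on failure.
 */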
INT
FASTCALL
MirrorRgnByWidth(
    _In_ HRGN hrgn,
    _In_ INT Width,
    _Out_opt_ HRGN *phrgn)
{
    INT cRgnDSize, Ret = 0;
    PRGNDATA pRgnData;

    /* Query the size in bytes of the buffer needed for the region data. */
    cRgnDSize = NtGdiGetRegionData(hrgn, 0, NULL);

    if (cRgnDSize)
    {
        pRgnData = LocalAlloc(LMEM_FIXED, cRgnDSize * sizeof(LONG));
        if (pRgnData)
        {
            if ( GetRegionData(hrgn, cRgnDSize, pRgnData) )
            {
                HRGN hRgnex;
                UINT i;
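                /* Mirror the bounding rectangle about the vertical line x = Width. */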
                INT SaveL = pRgnData->rdh.rcBound.left;
                pRgnData->rdh.rcBound.left = Width - pRgnData->rdh.rcBound.right;
                pRgnData->rdh.rcBound.right = Width - SaveL;
                if (pRgnData->rdh.nCount > 0)
                {
                    PRECT pRect = (PRECT)&pRgnData->Buffer;
                    for (i = 0; i < pRgnData->rdh.nCount; i++)
                    {
                        SaveL = pRect[i].left;
                        pRect[i].left = Width - pRect[i].right;
                        pRect[i].right = Width - SaveL;
                    }
                }
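                /* Restore rectangle ordering after mirroring, before rebuilding the region. */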
                SortRects((PRECT)&pRgnData->Buffer, pRgnData->rdh.nCount);
                hRgnex = ExtCreateRegion(NULL, cRgnDSize , pRgnData);
                if (hRgnex)
                {
                    if (phrgn) *phrgn = hRgnex; /* return the mirrored region to the caller */
                    else
                    {
                        CombineRgn(hrgn, hRgnex, 0, RGN_COPY);
                        DeleteObject(hRgnex);
                    }
                    Ret = 1;
                }
            }
            LocalFree(pRgnData);
        }
    }
    return Ret;
}
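
Below is a minimal, hypothetical sketch of calling MirrorRgnByWidth from client code; the wrapper name MirrorExample and the 400-pixel client width are illustrative, and the helper above is assumed to be declared in scope.

#include <windows.h>

/* Mirror a simple rectangular region inside a 400-pixel-wide client area. */
void MirrorExample(void)
{
    HRGN hrgn = CreateRectRgn(10, 10, 100, 50);  /* spans x = 10..100 */
    HRGN hrgnMirrored = NULL;

    if (hrgn && MirrorRgnByWidth(hrgn, 400, &hrgnMirrored))
    {
        RECT rc;
        GetRgnBox(hrgnMirrored, &rc);            /* now spans x = 300..390 */
        /* ... use the mirrored region ... */
        DeleteObject(hrgnMirrored);
    }
    if (hrgn) DeleteObject(hrgn);
}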
Example #2
/*
 * Main thread for Kinect input, vision processing, and network send - everything, really.
 */
void *cv_threadfunc (void *ptr) {
	// Images for openCV
	IplImage* timg = cvCloneImage(rgbimg); // Image we do our processing on
	IplImage* dimg = cvCloneImage(timg); // Image we draw on
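	// Round dimensions down to even values and create the 3-channel image we draw/display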
	CvSize sz = cvSize( timg->width & -2, timg->height & -2);
	IplImage* outimg = cvCreateImage(sz, 8, 3);

	// Mem. mgmt. Remember to clear each time we run loop.
	CvMemStorage* storage = cvCreateMemStorage(0);

	// Set region of interest
	cvSetImageROI(timg, cvRect(0, 0, sz.width, sz.height));
	if (display) { cvSetImageROI(dimg, cvRect(0, 0, sz.width, sz.height)); }

	// Open network socket.
	CRRsocket = openSocket();
	if (CRRsocket < 0) pthread_exit(NULL);

	/*
	 * MAIN LOOP
	 */
	while (1) 
	{ 
		// Sequence to run ApproxPoly on
		CvSeq* polyseq = cvCreateSeq( CV_SEQ_KIND_CURVE | CV_32SC2, sizeof(CvSeq), sizeof(CvPoint), storage );
		CvSeq* contours; // Raw contours list
		CvSeq* hull; // Current convex hull
		int hullcount; // # of points in hull

		/* PULL RAW IMAGE FROM KINECT */
		pthread_mutex_lock( &mutex_rgb );
		if (display) { cvCopy(rgbimg, dimg, 0); }
		cvCopy(rgbimg, timg, 0);
		pthread_mutex_unlock( &mutex_rgb );

		/* DILATE */
		IplConvKernel* element = cvCreateStructuringElementEx(3, 3, 1, 1, 0);
		IplConvKernel* element2 = cvCreateStructuringElementEx(5, 5, 2, 2, 0);
		cvDilate(timg, timg, element2, 1);
		cvErode(timg, timg, element, 1);
		// Release the kernels so they are not leaked on every loop iteration
		cvReleaseStructuringElement(&element);
		cvReleaseStructuringElement(&element2);

		/* THRESHOLD */
		cvThreshold(timg, timg, 100, 255, CV_THRESH_BINARY);

		/* OUTPUT PROCESSED OR RAW IMAGE (FindContours destroys image) */
		if (display) { cvCvtColor(dimg, outimg, CV_GRAY2BGR); }

		/* CONTOUR FINDING */
		cvFindContours(timg, storage, &contours, sizeof(CvContour), CV_RETR_LIST, CV_CHAIN_APPROX_SIMPLE, cvPoint(0,0));
 
		/* CONVEX HULL + POLYGON APPROXIMATION + CONVERT TO RECTANGLE + FILTER FOR INVALID RECTANGLES */ 
		// Store points to draw line between
		CvPoint* draw1;
		CvPoint* draw2; 
		vector<PolyVertices> rectangleList;

		while (contours) // Run for all polygons
		{
			// Candidate rectangle extracted from this contour
			PolyVertices fullrect;

			// Filter noise
			if (fabs(cvContourArea(contours, CV_WHOLE_SEQ)) > 600)
			{
				// Get convex hull
				hull = cvConvexHull2( contours, storage, CV_CLOCKWISE, 1 );
				hullcount = hull->total;

				// Draw hull (red line)
				if (display) {
					draw1 = (CvPoint*)cvGetSeqElem(hull, hullcount - 1);
					for (int i = 0; i < hullcount; i++)
					{
						draw2 = (CvPoint*)cvGetSeqElem( hull, i );
						cvLine( outimg, *draw1, *draw2, CV_RGB(255,0,0), 1, 8, 0 );
						draw1 = draw2;
					}
				}

				// Convert polys from convex hull to rectangles, fill list
				polyToQuad(hull, &fullrect, outimg);

				// Filter for bad rectangles
				if(!(fullrect.points[0] == NULL || fullrect.points[1] == NULL || 
					fullrect.points[2] == NULL || fullrect.points[3] == NULL)
					&& !fullrect.isMalformed())
				{
					/* FILL rectangleList */
					rectangleList.push_back(fullrect);

					#ifdef DEBUG_MAIN
					printf("RESULT: (%d,%d), (%d,%d), (%d,%d), (%d,%d)\n",
						fullrect.points[0]->x, fullrect.points[0]->y, 
						fullrect.points[1]->x, fullrect.points[1]->y,
						fullrect.points[2]->x, fullrect.points[2]->y, 
						fullrect.points[3]->x, fullrect.points[3]->y);
					fflush(stdout);
					#endif
				}

			}
			cvClearSeq(polyseq);
			contours = contours->h_next;
		}

		/* FILTER OVERLAPPING RECTANGLES */
		FilterInnerRects(rectangleList);

		/* SORT INTO CORRECT BUCKET */
		SortRects(rectangleList);

		/* DRAW & PROCESS MATH; FILL SEND STRUCT */
		// TODO: Might want to make the math stuff static for efficiency.
		RobotMath robot;
		TrackingData outgoing;
		memset(&outgoing, 0, sizeof(TrackingData));

		// Fill packets
		// Packet fields are unsigned 16-bit integers, so we need to scale them up.
		// Currently both dist and angle are scaled 100x (hundredths precision).
		// NOTE: correct results are currently only computed using the bottom basket,
		// with a constant used for the top.
		if (rectangleList[0].isValid())
		{
			outgoing.distHigh = 100 * robot.GetDistance(*(rectangleList[0].points[2]), *(rectangleList[0].points[3]), 0);
			outgoing.angleHigh = 100 * robot.GetAngle(*(rectangleList[0].points[2]), *(rectangleList[0].points[3]));
		}
//		if (rectangleList[1].isValid())
//		{
//			outgoing.distLeft = 100 * robot.GetDistance(*(rectangleList[1].points[2]), *(rectangleList[1].points[3]), 1);
//			outgoing.angleLeft = 100 * robot.GetAngle(*(rectangleList[1].points[2]), *(rectangleList[1].points[3]));
//		}
//		if (rectangleList[2].isValid())
//		{
//			outgoing.distRight = 100 * robot.GetDistance(*(rectangleList[2].points[2]), *(rectangleList[2].points[3]), 2);
//			outgoing.angleRight = 100 * robot.GetAngle(*(rectangleList[2].points[2]), *(rectangleList[2].points[3]));
//		}
		if (rectangleList[3].isValid())
		{
			outgoing.distLow = 100 * robot.GetDistance(*(rectangleList[3].points[2]), *(rectangleList[3].points[3]), 3);
			outgoing.angleLow = 100 * robot.GetAngle(*(rectangleList[3].points[2]), *(rectangleList[3].points[3]));
		}

		// Draw filtered rects (thick blue line)
		if (display) {
			for (int i = 0; i < 4; i++)
			{
				if (outimg && rectangleList[i].isValid())
				{
					cvLine( outimg, *(rectangleList[i].points[3]), *(rectangleList[i].points[2]), CV_RGB(0,0,255), 2, 8, 0 );
					cvLine( outimg, *(rectangleList[i].points[2]), *(rectangleList[i].points[0]), CV_RGB(0,0,255), 2, 8, 0 );
					cvLine( outimg, *(rectangleList[i].points[0]), *(rectangleList[i].points[1]), CV_RGB(0,0,255), 2, 8, 0 );
					cvLine( outimg, *(rectangleList[i].points[1]), *(rectangleList[i].points[3]), CV_RGB(0,0,255), 2, 8, 0 );
				}
			}
		}

		#ifdef DEBUG_MAIN
		printf("Top distance: %d\n", outgoing.distHigh);
		printf("Top angle: %d\n\n", outgoing.angleHigh);
		#endif

		// Vertical green center line as an aiming reference (assumes a 640x480 frame)
		CvPoint cent1 = cvPoint(320, 0);
		CvPoint cent2 = cvPoint(320, 480);
 		if (display) { cvLine( outimg, cent1, cent2, CV_RGB(0,255,0), 1, 8, 0 ); }

		/* SEND TO CRIO */
		sendData(&outgoing, CRRsocket);

		if( cvWaitKey( 15 )==27 )
		{
			// Empty for now.
		}

		/* DISPLAY */
		if (display) { cvShowImage (FREENECTOPENCV_WINDOW_N,outimg); }
		
		/* CLEANUP */
		cvClearMemStorage(storage);
	}
	pthread_exit(NULL);
}
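
A minimal sketch of how this worker thread might be launched, assuming the Kinect capture code, the shared rgbimg buffer, mutex_rgb, and the display flag are initialized elsewhere first; the launcher name start_vision_thread is illustrative.

#include <pthread.h>
#include <stdio.h>

/* Spawn the vision/network worker and wait for it to finish. */
int start_vision_thread(void)
{
	pthread_t cv_thread;

	/* cv_threadfunc ignores its argument, so pass NULL. */
	if (pthread_create(&cv_thread, NULL, cv_threadfunc, NULL) != 0) {
		perror("pthread_create");
		return -1;
	}

	/* ... the capture thread keeps filling rgbimg in parallel ... */

	pthread_join(cv_thread, NULL);
	return 0;
}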