Example #1
vector< IplImage* > Interpreter::ExtractAllSymbolBlobs(IplImage* Source)
{
	/*
	* Source: the image to extract symbol blobs from
	*/

	vector< IplImage* > AllBlobImages;

	IplImage* LabelImage = cvCreateImage(cvGetSize(Source), IPL_DEPTH_LABEL, 1);

	CvBlobs Blobs;
	unsigned int Result = cvLabel(Source, LabelImage, Blobs);


	// Crop out all the symbol blobs
	for (CvBlobs::const_iterator it = Blobs.begin(); it != Blobs.end(); ++it)
	{
		// AllBlobImages.push_back( ConvertToSquare( CropOutBlob(Source, it->second), 300) );
		AllBlobImages.push_back( CropOutBlob(Source, it->second) );
	}

	// This next part is going to be tricky. We have to keep only the _important_ blobs. There are a couple of ways to do this:
	// 1. By assuming that all symbols are going to be above a certain size - this gets rid of "noise"
	// 2. By keeping all pulled regions, and later discarding any with really low confidence values - possibly slower
	// ...
	// I'll sleep on it.
	return AllBlobImages;
}
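A hedged sketch of option 1 from the closing comment (discarding small "noise" blobs before cropping): cvBlob's cvFilterByArea, which the later examples also use, could be applied between cvLabel and the crop loop. The area bounds below are illustrative placeholders, not values from the original code.

	// Sketch only: filter out small "noise" blobs by area (option 1 above)
	// before cropping. 500 and 1000000 are illustrative bounds.
	unsigned int Result = cvLabel(Source, LabelImage, Blobs);
	cvFilterByArea(Blobs, 500, 1000000);

	for (CvBlobs::const_iterator it = Blobs.begin(); it != Blobs.end(); ++it)
		AllBlobImages.push_back( CropOutBlob(Source, it->second) );
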
CvBlobs MultiCursorAppCpp::labelingUserArea(Mat& src)
{
	// Dilate the image to make labeling more stable
#ifdef USE_KINECT_V1
	dilate(src, src, Mat(), Point(-1, -1), 3);
#else
	//dilate(src, src, Mat(), Point(-1, -1), 1);
#endif

	/* Use IplImage (Labeling for Mat is not fully implemented) */
	// Convert to IplImage
	IplImage srcIpl = (IplImage)src;
	// Convert to gray scale
	IplImage* srcIplBinary = cvCreateImage(cvGetSize(&srcIpl), IPL_DEPTH_8U, 1);
	cvCvtColor(&srcIpl, srcIplBinary, CV_BGR2GRAY);
	// Get binary image
	cvThreshold(srcIplBinary, srcIplBinary, 100, 255, CV_THRESH_BINARY);
	// Get blobs
	IplImage* labelImg = cvCreateImage(cvGetSize(srcIplBinary), IPL_DEPTH_LABEL, 1);
	
	CvBlobs blobs;
	UINT result = cvLabel(srcIplBinary, labelImg, blobs);

	Mat labelMatBuf(labelImg, true); // copy the data
	labelMat = labelMatBuf;
	CV_Assert(reinterpret_cast<uchar*>(labelImg->imageData) != labelMat.data);
	
	// Filter out noise blobs
	cvFilterByArea(blobs, 2000, 1000000);

	// Render blobs
	cvRenderBlobs(labelImg, blobs, &srcIpl, &srcIpl);

	// Free unused IplImages
	cvReleaseImage(&labelImg);
	cvReleaseImage(&srcIplBinary);
	
	return blobs;
}
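For context, a minimal sketch of how a caller inside MultiCursorAppCpp might consume the returned blobs; the input Mat name userAreaMat is assumed, not taken from the original code:

	// Sketch only: iterate the labeled blobs returned above.
	// 'userAreaMat' is an assumed input image name.
	CvBlobs userBlobs = labelingUserArea(userAreaMat);
	for (CvBlobs::const_iterator it = userBlobs.begin(); it != userBlobs.end(); ++it)
	{
		const CvBlob* blob = it->second;
		// Each CvBlob carries its pixel area, bounding box and centroid.
		printf("label=%u area=%u centroid=(%.1f, %.1f)\n",
		       blob->label, blob->area, blob->centroid.x, blob->centroid.y);
	}
	// As in the tracker example below, the caller should eventually call
	// cvReleaseBlobs(userBlobs) to free the blobs allocated by cvLabel.
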
/**
 * Runs the tracking.
 */
void Tracker::executeTracker()
{
	#define PI (3.1415926535897932384626433832795028841)
	// Kinect FOV: 43° vertical, 57° horizontal
	double verticalScalingFactor = tan(43 * PI / 180) / 240;
	double horizontalScalingFactor = tan(57 * PI / 180) / 320;
	ROS_DEBUG("Scaling factors: %lf/%lf", horizontalScalingFactor, verticalScalingFactor);

	bool quadcopterTracked = false;

	// Images
	cv::Mat cameraImage(cv::Size(640, 480), CV_8UC3); // Only for showCameraImage == true.
	cv::Mat maskedImage(cv::Size(640, 480), CV_8UC3); // Only for showMaskedImage == true.
	cv::Mat image(cv::Size(640, 480), CV_8UC3); // The raw image from the camera.
	cv::Mat mapImage(cv::Size(640, 480), CV_8UC1); // The color mapped image.
	cv::Mat hsvImage(cv::Size(640, 480), CV_8UC3);  // The raw image in hsv format.

	// CvBlob
	cvb::CvBlobs blobs;
	IplImage *labelImg = cvCreateImage(image.size(), IPL_DEPTH_LABEL, 1);
	cv::Mat morphKernel = cv::getStructuringElement(CV_SHAPE_RECT, cv::Size(5, 5));
	cvb::CvTracks tracks;
	IplImage iplMapImage;

	while (!stopFlag) {
		if (imageDirty == 0) {
			usleep(100);
			continue;
		} else if (imageDirty > 1) {
			ROS_WARN("Skipped %d frames!", imageDirty - 1);
		}

		START_CLOCK(trackerClock)

		imageMutex.lock();
		((cv::Mat*) this->image)->copyTo(image);
		long int time = this->imageTime;
		imageDirty = 0;
		imageMutex.unlock();

		if (showCameraImage)
			image.copyTo(cameraImage);

		createColorMapImage(image, mapImage, hsvImage);

		if (showMaskedImage) {
			// Convert to 3 channel image.
			int target = 0;

			for (int i = 0; i < mapImage.total(); ++i) {
				maskedImage.data[target++] = mapImage.data[i];
				maskedImage.data[target++] = mapImage.data[i];
				maskedImage.data[target++] = mapImage.data[i];
			}
		}

		cv::morphologyEx(mapImage, mapImage, cv::MORPH_OPEN, morphKernel);

		// Finding blobs
		// Only copies headers.
		iplMapImage = mapImage;
		unsigned int result = cvLabel(&iplMapImage, labelImg, blobs);
		// ROS_DEBUG("Blob result: %d", result);

		// Filter blobs
		cvFilterByArea(blobs, 10, 1000000);

		if (showCameraImage || showMaskedImage)
			cvUpdateTracks(blobs, tracks, 200., 5);

		if (showMaskedImage) {
			// Only copies headers.
			IplImage iplImage = maskedImage;
			cvRenderBlobs(labelImg, blobs, &iplImage, &iplImage, CV_BLOB_RENDER_BOUNDING_BOX);
			cvRenderTracks(tracks, &iplImage, &iplImage, CV_TRACK_RENDER_ID | CV_TRACK_RENDER_BOUNDING_BOX);
			ROS_DEBUG("Exiting visual masked block"); // TODO Tracking down
			                                          // issue #7
		}

		if (showCameraImage) {
			// Only copies headers.
			IplImage iplImage = cameraImage;
			cvRenderBlobs(labelImg, blobs, &iplImage, &iplImage, CV_BLOB_RENDER_BOUNDING_BOX);
			cvRenderTracks(tracks, &iplImage, &iplImage, CV_TRACK_RENDER_ID | CV_TRACK_RENDER_BOUNDING_BOX);
			ROS_DEBUG("Exiting visual masked block"); // TODO Tracking down
			                                          // issue #7
		}

		if (showCameraImage || showMaskedImage)
			cvReleaseTracks(tracks);

		if (blobs.size() != 0) {
			// Find biggest blob
			cvb::CvLabel largestBlob = cvLargestBlob(blobs);
			CvPoint2D64f center = blobs.find(largestBlob)->second->centroid;
			double x = center.x;
			double y = center.y;

			// Set (0, 0) to center.
			x -= 320;
			y -= 240;
			// ROS_DEBUG("Center: %lf/%lf", x, y);

			// Apply scaling
			x *= horizontalScalingFactor;
			y *= verticalScalingFactor;

			dataReceiver->receiveTrackingData(cv::Scalar(x, y, 1.0), ((QuadcopterColor*) qc)->getId(), time);

			if (showMaskedImage)
				drawCross(maskedImage, center.x, center.y);

			if (showCameraImage)
				drawCross(cameraImage, center.x, center.y);

			if (!quadcopterTracked) {
				quadcopterTracked = true;
				ROS_DEBUG("Quadcopter %d tracked", ((QuadcopterColor*) this->qc)->getId());
			}
		} else if (quadcopterTracked) {
			quadcopterTracked = false;
			ROS_DEBUG("Quadcopter %d NOT tracked", ((QuadcopterColor*) this->qc)->getId());
		}

		// Free cvb stuff.
		cvReleaseBlobs(blobs);

		// ROS_DEBUG("cvb stuff freed"); // TODO Tracking down issue #7

		if (showMaskedImage) {
			cv::imshow(maskedWindowName, maskedImage);

			ROS_DEBUG("showed masked image"); // TODO Tracking down issue #7
		}

		if (showCameraImage) {
			cv::imshow(cameraWindowName, cameraImage);

			ROS_DEBUG("showed camera image"); // TODO Tracking down issue #7
		}

		STOP_CLOCK(trackerClock, "Calculation of quadcopter position took: ")
	}

	cvReleaseImage(&labelImg);

	if (showMaskedImage)
		cv::destroyWindow(maskedWindowName);

	if (showCameraImage)
		cv::destroyWindow(cameraWindowName);

	ROS_INFO("Tracker with id %d terminated", ((QuadcopterColor*) this->qc)->getId());
}
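drawCross is referenced above but not shown; a minimal sketch of such a helper, assuming it simply marks the centroid with two perpendicular line segments (the arm length and color are placeholder choices, and the signature is inferred from the call sites above):

// Hypothetical implementation matching the drawCross(image, x, y) calls above.
// Arm length and color are illustrative only.
void Tracker::drawCross(cv::Mat& image, double x, double y)
{
	const int d = 10; // half length of each arm, in pixels
	cv::Point center(cvRound(x), cvRound(y));
	cv::line(image, center - cv::Point(d, 0), center + cv::Point(d, 0), cv::Scalar(0, 0, 255), 2);
	cv::line(image, center - cv::Point(0, d), center + cv::Point(0, d), cv::Scalar(0, 0, 255), 2);
}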