Code Example #1
/**
 * Filters the given image using the QuadcopterColor.
 * The result is written to mapImage. White pixels mean in range,
 * black pixels mean out of range.
 *
 * @param image The raw image.
 * @param mapImage The resulting mapped image. (Output array)
 * @param hsvImage The raw image in hsv format. (Output array)
 */
cv::Mat Tracker::createColorMapImage(cv::Mat &image, cv::Mat &mapImage, cv::Mat &hsvImage)
{
	START_CLOCK(convertColorClock)

	// Make sure mapImage has a single-channel buffer of the matching size.
	// create() is a no-op if the buffer already matches, so during tracking
	// this only allocates memory once.
	mapImage.create(image.size(), CV_8UC1);

	cv::cvtColor(image, hsvImage, CV_BGR2HSV);

	STOP_CLOCK(convertColorClock, "Converting colors took: ")
	START_CLOCK(maskImageClock)

	uint8_t * current, *end, *source;
	int minHue, maxHue, minSaturation, minValue;

	QuadcopterColor *color = (QuadcopterColor*) qc;

	minHue = color->getMinColor().val[0];
	maxHue = color->getMaxColor().val[0];
	minSaturation = color->getMinColor().val[1];
	minValue = color->getMinColor().val[2];

	end = mapImage.data + mapImage.size().width * mapImage.size().height;
	source = hsvImage.data;

	if (minHue < maxHue)
		for (current = mapImage.data; current < end; ++current, source += 3) {
			if (*source > maxHue || *source < minHue || *(source + 1) < minSaturation || *(source + 2) < minValue)
				*current = 0;
			else
				*current = 255;
		}
	else
		// Hue interval inverted here.
		for (current = mapImage.data; current < end; ++current, source += 3) {
			if ((*source > maxHue && *source < minHue) || *(source + 1) < minSaturation || *(source + 2) < minValue)
				*current = 0;
			else
				*current = 255;
		}

	STOP_CLOCK(maskImageClock, "Image masking took: ")

	return mapImage;
}
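
For comparison (not part of the original tracker), the same white/black mask can be produced with OpenCV's cv::inRange; the wrap-around hue interval turns into two ranges combined with a bitwise OR. The helper name hsvRangeMask and its signature are invented for illustration, with minHsv/maxHsv standing in for QuadcopterColor::getMinColor() and getMaxColor():

#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>

// Sketch only: equivalent masking via cv::inRange instead of the manual pixel loop.
// Assumes 8-bit HSV as produced by cv::cvtColor(..., CV_BGR2HSV), i.e. hue in [0, 179].
static void hsvRangeMask(const cv::Mat &hsvImage, const cv::Scalar &minHsv, const cv::Scalar &maxHsv, cv::Mat &mapImage)
{
	if (minHsv[0] < maxHsv[0]) {
		// Normal case: hue in [minHue, maxHue], saturation and value above their minima.
		cv::inRange(hsvImage, minHsv, cv::Scalar(maxHsv[0], 255, 255), mapImage);
	} else {
		// Wrap-around case: hue <= maxHue OR hue >= minHue.
		cv::Mat low, high;
		cv::inRange(hsvImage, cv::Scalar(0, minHsv[1], minHsv[2]), cv::Scalar(maxHsv[0], 255, 255), low);
		cv::inRange(hsvImage, cv::Scalar(minHsv[0], minHsv[1], minHsv[2]), cv::Scalar(179, 255, 255), high);
		cv::bitwise_or(low, high, mapImage);
	}
}

The hand-written loop in createColorMapImage avoids the two temporary masks that the wrap-around branch needs here, which may be why it was written out by hand.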
Code Example #2
#include <stdio.h>

/* memory_block, STEPS and the timing macros are assumed to be defined elsewhere. */
int main (void) {
    int i;

    CREATE_CLOCK(memory_dependency);

    START_CLOCK(memory_dependency);
    for (i = 0; i < STEPS; i++) { memory_block[0]++; memory_block[0]++; }
    STOP_CLOCK(memory_dependency);
    printf("same address, microseconds elapsed: %lu\n", CLOCK_MICROSECS_ELAPSED(memory_dependency));

    START_CLOCK(memory_dependency);
    for (i = 0; i < STEPS; i++) { memory_block[0]++; memory_block[1]++; }
    STOP_CLOCK(memory_dependency);
    printf("next address, microseconds elapsed: %lu\n", CLOCK_MICROSECS_ELAPSED(memory_dependency));

    return 0;
}
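
The timing macros used above are not defined in this excerpt. Below is a minimal sketch of how the single-argument variants could look on POSIX using clock_gettime; these are assumed definitions, not the project's actual ones, and the two-argument STOP_CLOCK(clock, message) variant seen in the tracker code would additionally print the message together with the elapsed time.

#include <time.h>

/* Assumed sketch: one struct timespec pair per named clock. */
#define CREATE_CLOCK(name) struct timespec name##_start, name##_stop

#define START_CLOCK(name) clock_gettime(CLOCK_MONOTONIC, &name##_start)

#define STOP_CLOCK(name) clock_gettime(CLOCK_MONOTONIC, &name##_stop)

/* Elapsed time in microseconds between START_CLOCK and STOP_CLOCK. */
#define CLOCK_MICROSECS_ELAPSED(name) \
    ((unsigned long) ((name##_stop.tv_sec - name##_start.tv_sec) * 1000000L \
                      + (name##_stop.tv_nsec - name##_start.tv_nsec) / 1000L))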
Code Example #3
/**
 * Runs the tracking.
 */
void Tracker::executeTracker()
{
	#define PI (3.1415926535897932384626433832795028841)
	// Kinect FOV: 43° vertical, 57° horizontal
	double verticalScalingFactor = tan(43 * PI / 180) / 240;
	double horizontalScalingFactor = tan(57 * PI / 180) / 320;
	ROS_DEBUG("Scaling factors: %lf/%lf", horizontalScalingFactor, verticalScalingFactor);

	bool quadcopterTracked = false;

	// Images
	cv::Mat cameraImage(cv::Size(640, 480), CV_8UC3); // Only for showCameraImage == true.
	cv::Mat maskedImage(cv::Size(640, 480), CV_8UC3); // Only for showMaskedImage == true.
	cv::Mat image(cv::Size(640, 480), CV_8UC3); // The raw image from the camera.
	cv::Mat mapImage(cv::Size(640, 480), CV_8UC1); // The color mapped image.
	cv::Mat hsvImage(cv::Size(640, 480), CV_8UC3);  // The raw image in hsv format.

	// CvBlob
	cvb::CvBlobs blobs;
	IplImage *labelImg = cvCreateImage(image.size(), IPL_DEPTH_LABEL, 1);
	cv::Mat morphKernel = cv::getStructuringElement(CV_SHAPE_RECT, cv::Size(5, 5));
	cvb::CvTracks tracks;
	IplImage iplMapImage;

	while (!stopFlag) {
		if (imageDirty == 0) {
			usleep(100);
			continue;
		} else if (imageDirty > 1) {
			ROS_WARN("Skipped %d frames!", imageDirty - 1);
		}

		START_CLOCK(trackerClock)

		imageMutex.lock();
		((cv::Mat*) this->image)->copyTo(image);
		long int time = this->imageTime;
		imageDirty = 0;
		imageMutex.unlock();

		if (showCameraImage)
			image.copyTo(cameraImage);

		createColorMapImage(image, mapImage, hsvImage);

		if (showMaskedImage) {
			// Convert to 3 channel image.
			int target = 0;

			for (int i = 0; i < mapImage.total(); ++i) {
				maskedImage.data[target++] = mapImage.data[i];
				maskedImage.data[target++] = mapImage.data[i];
				maskedImage.data[target++] = mapImage.data[i];
			}
		}

		cv::morphologyEx(mapImage, mapImage, cv::MORPH_OPEN, morphKernel);

		// Finding blobs
		// Only copies headers.
		iplMapImage = mapImage;
		unsigned int result = cvLabel(&iplMapImage, labelImg, blobs);
		// ROS_DEBUG("Blob result: %d", result);

		// Filter blobs
		cvFilterByArea(blobs, 10, 1000000);

		if (showCameraImage || showMaskedImage)
			cvUpdateTracks(blobs, tracks, 200., 5);

		if (showMaskedImage) {
			// Only copies headers.
			IplImage iplImage = maskedImage;
			cvRenderBlobs(labelImg, blobs, &iplImage, &iplImage, CV_BLOB_RENDER_BOUNDING_BOX);
			cvRenderTracks(tracks, &iplImage, &iplImage, CV_TRACK_RENDER_ID | CV_TRACK_RENDER_BOUNDING_BOX);
			ROS_DEBUG("Exiting visual masked block"); // TODO Tracking down
			                                          // issue #7
		}

		if (showCameraImage) {
			// Only copies headers.
			IplImage iplImage = cameraImage;
			cvRenderBlobs(labelImg, blobs, &iplImage, &iplImage, CV_BLOB_RENDER_BOUNDING_BOX);
			cvRenderTracks(tracks, &iplImage, &iplImage, CV_TRACK_RENDER_ID | CV_TRACK_RENDER_BOUNDING_BOX);
			ROS_DEBUG("Exiting visual masked block"); // TODO Tracking down
			                                          // issue #7
		}

		if (showCameraImage || showMaskedImage)
			cvReleaseTracks(tracks);

		if (blobs.size() != 0) {
			// Find biggest blob
			cvb::CvLabel largestBlob = cvLargestBlob(blobs);
			CvPoint2D64f center = blobs.find(largestBlob)->second->centroid;
			double x = center.x;
			double y = center.y;

			// Set (0, 0) to center.
			x -= 320;
			y -= 240;
			// ROS_DEBUG("Center: %lf/%lf", x, y);

			// Apply scaling
			x *= horizontalScalingFactor;
			y *= verticalScalingFactor;

			dataReceiver->receiveTrackingData(cv::Scalar(x, y, 1.0), ((QuadcopterColor*) qc)->getId(), time);

			if (showMaskedImage)
				drawCross(maskedImage, center.x, center.y);

			if (showCameraImage)
				drawCross(cameraImage, center.x, center.y);

			if (!quadcopterTracked) {
				quadcopterTracked = true;
				ROS_DEBUG("Quadcopter %d tracked", ((QuadcopterColor*) this->qc)->getId());
			}
		} else if (quadcopterTracked) {
			quadcopterTracked = false;
			ROS_DEBUG("Quadcopter %d NOT tracked", ((QuadcopterColor*) this->qc)->getId());
		}

		// Free cvb stuff.
		cvReleaseBlobs(blobs);

		// ROS_DEBUG("cvb stuff freed"); // TODO Tracking down issue #7

		if (showMaskedImage) {
			cv::imshow(maskedWindowName, maskedImage);

			ROS_DEBUG("showed masked image"); // TODO Tracking down issue #7
		}

		if (showCameraImage) {
			cv::imshow(cameraWindowName, cameraImage);

			ROS_DEBUG("showed camera image"); // TODO Tracking down issue #7
		}

		STOP_CLOCK(trackerClock, "Calculation of quadcopter position took: ")
	}

	cvReleaseImage(&labelImg);

	if (showMaskedImage)
		cv::destroyWindow(maskedWindowName);

	if (showCameraImage)
		cv::destroyWindow(cameraWindowName);

	ROS_INFO("Tracker with id %d terminated", ((QuadcopterColor*) this->qc)->getId());
}
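
To make the scaling factors in executeTracker() concrete, the following standalone sketch (not project code) reproduces the centroid-to-tracking-vector computation for an invented blob centroid; the resulting (x, y, 1.0) is what receiveTrackingData() is handed.

#include <cmath>
#include <cstdio>

// Standalone sketch of the pixel-to-vector mapping used in executeTracker().
// The centroid (480, 300) is an example value, not real tracking output.
int main()
{
	const double PI = 3.1415926535897932384626433832795028841;
	double verticalScalingFactor = std::tan(43 * PI / 180) / 240;   // same formulas as executeTracker()
	double horizontalScalingFactor = std::tan(57 * PI / 180) / 320;

	double cx = 480.0, cy = 300.0;                    // example blob centroid in pixels
	double x = (cx - 320) * horizontalScalingFactor;  // offset from image center, scaled: ~0.770
	double y = (cy - 240) * verticalScalingFactor;    // ~0.233

	std::printf("tracking vector: (%lf, %lf, 1.0)\n", x, y);
	return 0;
}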