Exemple #1
0
// Runs one tracking step over the current frame:
//  1. matches this frame's detections against the active Kalman states (kstates)
//     by template/feature matching on a 20%-expanded ROI,
//  2. predicts every active state and corrects the matched ones with the
//     corresponding detection rectangle,
//  3. spawns new states for unmatched detections,
//  4. suppresses overlapping states and drops stale/implausible ones,
//  5. writes the surviving tracks into trackedDetections.
//
// Parameters:
//  detections        - detections for this frame (x,y,w,h rects + score)
//  frameNumber       - NOTE(review): not referenced anywhere in this function
//  kstates           - persistent per-object Kalman filter states (updated in place)
//  active            - NOTE(review): parameter is never read; the per-state
//                      kstates[i].active flag is used instead
//  image             - current frame, used for ROI cropping and bounds clamping
//  trackedDetections - output: tracked objects after suppression/cleanup
//  colors            - palette forwarded to initTracking for new tracks
//
// Relies on file-scope globals _ranges, _min_heights, _max_heights being
// parallel arrays indexed like `detections` — assumed filled by the caller
// for this frame; TODO confirm against the publishing callback.
void doTracking(std::vector<cv::LatentSvmDetector::ObjectDetection>& detections, int frameNumber,
		std::vector<kstate>& kstates, std::vector<bool>& active, cv::Mat& image,
		std::vector<kstate>& trackedDetections, std::vector<cv::Scalar> & colors)
{
	std::vector<cv::LatentSvmDetector::ObjectDetection> objects;
	//vector<LatentSvmDetector::ObjectDetection> tracked_objects;
	std::vector<bool> predict_indices;//this will correspond to kstates i
	std::vector<bool> correct_indices;//this will correspond to kstates i
	std::vector<int> correct_detection_indices;//this will correspond to kstates i, used to store the index of the corresponding object
	std::vector<bool> add_as_new_indices;//this will correspond to detections j

	//predict_indices.assign(kstates.size(), true);//always predict
	correct_indices.assign(kstates.size(), false);//correct only those matched
	// NOTE(review): this is a vector<int> filled with `false` (i.e. 0); 0 is
	// also a valid detection index, but entries are only read when the matching
	// correct_indices[i] is true, so the sentinel value is never consumed.
	correct_detection_indices.assign(kstates.size(), false);//correct only those matched
	add_as_new_indices.assign(detections.size(), true);//if the detection was not found add as new

	//Convert Bounding box coordinates from (x1,y1,w,h) to (BoxCenterX, BoxCenterY, width, height)
	objects = detections;//bboxToPosScale(detections);

	std::vector<int> already_matched;
	//compare detections from this frame with tracked objects;
	//each detection is matched to at most one state (greedy, first match wins)
	for (unsigned int j = 0; j < detections.size(); j++)
	{
		for (unsigned int i = 0; i < kstates.size(); i++)
		{
			//compare only to active tracked objects(not too old)
			if (kstates[i].active)
			{
				//extend the roi 20% (10% on each side) around the detection
				int new_x = (detections[j].rect.x - detections[j].rect.width*.1);
				int new_y = (detections[j].rect.y - detections[j].rect.height*.1);

				//clamp the expanded origin to the image bounds
				if (new_x < 0)			new_x = 0;
				if (new_x > image.cols)	new_x = image.cols;
				if (new_y < 0)			new_y = 0;
				if (new_y > image.rows) new_y = image.rows;

				int new_width = detections[j].rect.width*1.2;
				int new_height = detections[j].rect.height*1.2;

				//shrink width/height so the ROI stays inside the image
				if (new_width  + new_x > image.cols)	new_width  = image.cols - new_x;
				if (new_height + new_y > image.rows)	new_height = image.rows - new_y;

				cv::Rect roi_20(new_x, new_y, new_width, new_height);
				//cv::Rect roi(detections[j].rect);
				cv::Rect roi(roi_20);
				cv::Mat currentObjectROI = image(roi).clone();//Crop image and obtain only object (ROI)

				//cv::Rect intersect = detections[j].rect & kstates[i].pos;//check overlapping

				cv::Rect boundingbox;
				bool matched = false;
				//try to match with previous frame: cross-correlation or ORB
				//feature matching, skipping detections already claimed by
				//another state this frame
				if ( !USE_ORB )
					matched = ( !alreadyMatched(j, already_matched) && crossCorr(kstates[i].image, currentObjectROI));
				else
					matched = (!alreadyMatched(j, already_matched) && orbMatch(currentObjectROI, kstates[i].image, boundingbox, ORB_MIN_MATCHES, ORB_KNN_RATIO));

				if(matched)
				{
					correct_indices[i] = true;//if ROI on this frame is matched to a previous object, correct
					correct_detection_indices[i] = j;//store the index of the detection corresponding to matched kstate
					add_as_new_indices[j] = false;//if matched do not add as new
					//kstates[i].image = currentObjectROI;//update image with current frame data
					kstates[i].score = detections[j].score;
					kstates[i].range = _ranges[j];
					already_matched.push_back(j);
				}//crossCorr

			}//kstates[i].active
		}//for (int i = 0; i < kstates.size(); i++)
	}//for (int j = 0; j < detections.size(); j++)


	//do prediction and correction for the marked states
	for (unsigned int i = 0; i < kstates.size(); i++)
	{
		if (kstates[i].active)//predict and correct only active states
		{
			//update params before predicting
			//NOTE(review): resetting errorCovPost every frame discards the
			//filter's accumulated covariance — presumably intentional here;
			//verify against the KF tuning for this pipeline
			cv::setIdentity(kstates[i].KF.measurementMatrix);
			cv::setIdentity(kstates[i].KF.processNoiseCov, cv::Scalar::all(NOISE_COV));//1e-4
			cv::setIdentity(kstates[i].KF.measurementNoiseCov, cv::Scalar::all(MEAS_NOISE_COV));//1e-3
			cv::setIdentity(kstates[i].KF.errorCovPost, cv::Scalar::all(ERROR_ESTIMATE_COV));//100

			cv::Mat prediction = kstates[i].KF.predict();
			cv::Mat correction;
			//state vector layout: [x, y, width, height]
			kstates[i].pos.x = prediction.at<float>(0);
			kstates[i].pos.y = prediction.at<float>(1);
			kstates[i].pos.width = prediction.at<float>(2);
			kstates[i].pos.height = prediction.at<float>(3);
			kstates[i].real_data = 0;
			kstates[i].range = 0.0f;//fixed to zero temporarily as this is not real_data
			kstates[i].min_height = 0.0f;//fixed to zero temporarily as this is not real_data
			kstates[i].max_height = 0.0f;//fixed to zero temporarily as this is not real_data

			//now do respective corrections on KFs (updates)
			if (correct_indices[i])
			{
				//a match was found hence update KF measurement
				int j = correct_detection_indices[i];//obtain the index of the detection

				//cv::Mat_<float> measurement = (cv::Mat_<float>(2, 1) << objects[j].rect.x, //XY ONLY
				//												objects[j].rect.y);
				cv::Mat_<float> measurement = (cv::Mat_<float>(4, 1) << objects[j].rect.x,
					objects[j].rect.y,
					objects[j].rect.width,
					objects[j].rect.height);

				correction = kstates[i].KF.correct(measurement);//UPDATE KF with new info
				kstates[i].lifespan = DEFAULT_LIFESPAN; //RESET Lifespan of object

				//kstates[i].pos.width = objects[j].rect.width;//XY ONLY
				//kstates[i].pos.height = objects[j].rect.height;//XY ONLY

				//use real data instead of predicted if set
				//(the KF correction result itself is discarded for the
				//reported position; the raw detection rect is published)
				kstates[i].pos.x = objects[j].rect.x;
				kstates[i].pos.y = objects[j].rect.y;
				kstates[i].pos.width = objects[j].rect.width;
				kstates[i].pos.height = objects[j].rect.height;
				kstates[i].real_data = 1;
				//cv::Mat im1 = image(kstates[i].pos);
				//cv::Mat im2 = image(objects[j].rect);
				kstates[i].range = _ranges[j];
				kstates[i].min_height = _min_heights[j];
				kstates[i].max_height = _max_heights[j];
			}


			//check that new widths and heights don't go beyond the image size
			if (kstates[i].pos.width + kstates[i].pos.x > image.cols)
				kstates[i].pos.width = image.cols - kstates[i].pos.x;
			if (kstates[i].pos.height + kstates[i].pos.y > image.rows)
				kstates[i].pos.height = image.rows - kstates[i].pos.y;

			//check that predicted positions are inside the image
			if (kstates[i].pos.x < 0)
				kstates[i].pos.x = 0;
			if (kstates[i].pos.x > image.cols)
				kstates[i].pos.x = image.cols;
			if (kstates[i].pos.y < 0)
				kstates[i].pos.y = 0;
			if (kstates[i].pos.y > image.rows)
				kstates[i].pos.y = image.rows;

			//remove those where the dimensions of are unlikely to be real
			//(aspect-ratio sanity checks: too wide or too tall to be an object)
			if (kstates[i].pos.width > kstates[i].pos.height*4)
				kstates[i].active = false;

			if (kstates[i].pos.height > kstates[i].pos.width*2)
				kstates[i].active = false;

			kstates[i].lifespan--;//reduce lifespan (matched states were just reset to DEFAULT_LIFESPAN above)
			if (kstates[i].lifespan <= 0)
			{
				kstates[i].active = false; //Too old, stop tracking.
			}
		}
	}

	//finally add non matched detections as new
	for (unsigned int i = 0; i < add_as_new_indices.size(); i++)
	{
		if (add_as_new_indices[i])
		{
			initTracking(objects[i], kstates, detections[i], image, colors, _ranges[i]);
		}
	}
	/*
	//check overlapping states and remove them
	float overlap = (OVERLAPPING_PERC/100);
	std::vector<unsigned int> removedIndices;
	for (unsigned int i = 0; i < kstates.size() ; i++)
	{
		for (unsigned int j = kstates.size() - 1; j > 0; j--)
		{
			if (i==j || isInRemoved(removedIndices, i) || isInRemoved(removedIndices, j))
				continue;
			//cout << "i:" << i << " j:" << j << endl;
			cv::Rect intersection = kstates[i].pos & kstates[j].pos;

			if ( ( (intersection.width >= kstates[i].pos.width * overlap) && (intersection.height >= kstates[i].pos.height * overlap) ) ||
				( (intersection.width >= kstates[j].pos.width * overlap) && (intersection.height >= kstates[j].pos.height * overlap) ) )
			{
				//if one state is overlapped by "overlap" % remove it (mark it as unused
				if (kstates[i].real_data && !(kstates[j].real_data))
				{
					kstates[j].active = false;
					removedIndices.push_back(j);
				}
				else if (!(kstates[i].real_data) && (kstates[j].real_data))
				{
					kstates[i].active = false;
					removedIndices.push_back(i);
				}
				else
				{
					kstates[j].active = false;
					removedIndices.push_back(j);
				}
			}
		}
	}*/
	//replaces the hand-rolled overlap removal above
	ApplyNonMaximumSuppresion(kstates, OVERLAPPING_PERC);

	//drop states marked inactive during this pass
	removeUnusedObjects(kstates);

	//return to x,y,w,h
	posScaleToBbox(kstates, trackedDetections);

}
Exemple #2
0
	/// Image callback: associates the pending detections (obj_detections_) with
	/// the live trackers (obj_trackers_) by rectangle overlap, updates every
	/// tracker, spawns trackers for unmatched detections, prunes expired
	/// trackers, and publishes the tracked set as a
	/// cv_tracker_msgs::image_obj_tracked message.
	///
	/// @param image_source incoming ROS image; converted to BGR8 for tracking.
	///
	/// Side effects: mutates obj_trackers_, clears obj_detections_, draws the
	/// tracked rectangles onto the converted image, publishes on
	/// publisher_tracked_objects_, and resets ready_.
	void image_callback(const sensor_msgs::Image& image_source)
	{
		cv_bridge::CvImagePtr cv_image = cv_bridge::toCvCopy(image_source, sensor_msgs::image_encodings::BGR8);
		cv::Mat image_track = cv_image->image;
		cv::LatentSvmDetector::ObjectDetection empty_detection(cv::Rect(0,0,0,0),0,0);

		std::vector<bool> tracker_matched(obj_trackers_.size(), false);
		std::vector<bool> object_matched(obj_detections_.size(), false);

		//greedy association: a detection matches a tracker when their
		//intersection covers more than 30% of the detection's own area;
		//each detection/tracker participates in at most one match
		for (unsigned int i = 0; i < obj_detections_.size(); i++)
		{
			for (unsigned int j = 0; j < obj_trackers_.size(); j++)
			{
				if (tracker_matched[j] || object_matched[i])
					continue;

				const cv::Rect& detection_rect = obj_detections_[i].rect;
				int area = detection_rect.width * detection_rect.height;
				cv::Rect intersection = detection_rect & obj_trackers_[j]->GetTrackedObject().rect;
				if ( (intersection.width * intersection.height) > area*0.3 )
				{
					obj_trackers_[j]->Track(image_track, obj_detections_[i], true);
					tracker_matched[j] = true;
					object_matched[i] = true;
				}
			}
		}

		//trackers without a matching detection keep tracking from image data alone
		for (unsigned int i = 0; i < obj_trackers_.size(); i++)
		{
			if (!tracker_matched[i])
			{
				obj_trackers_[i]->Track(image_track, empty_detection, false);
			}
		}

		//create trackers for those objects not being tracked yet
		for (unsigned int i = 0; i < obj_detections_.size(); i++)
		{
			if (!object_matched[i])//if object wasn't matched by overlapping area, create a new tracker
			{
				if (num_trackers_ > 10)//tracker ids cycle through 1..11
					num_trackers_ = 0;
				LkTracker* new_tracker = new LkTracker(++num_trackers_, min_heights_[i], max_heights_[i], ranges_[i]);
				new_tracker->Track(image_track, obj_detections_[i], true);
				obj_trackers_.push_back(new_tracker);
			}
		}

		ApplyNonMaximumSuppresion(obj_trackers_, 0.3);

		//remove those trackers with its lifespan <=0
		//BUGFIX: trackers are heap-allocated with `new` above and are only
		//referenced from obj_trackers_, so each expired tracker must be
		//deleted before its pointer is erased or it leaks every time
		for (std::vector<LkTracker*>::iterator it = obj_trackers_.begin(); it != obj_trackers_.end();)
		{
			if ( (*it)->GetRemainingLifespan() <= 0 )
			{
				delete *it;
				it = obj_trackers_.erase(it);
			}
			else
				++it;
		}

		//copy results to ros msg
		unsigned int num = obj_trackers_.size();
		std::vector<cv_tracker_msgs::image_rect_ranged> rect_ranged_array;//tracked rectangles
		std::vector<int> real_data(num,0);//boolean array to show if data in rect_ranged comes from tracking or detection
		std::vector<unsigned int> obj_id(num, 0);//id number for each rect_range
		std::vector<unsigned int> lifespan(num, 0);//remaining lifespan of each rectranged
		for (unsigned int i = 0; i < num; i++)
		{
			cv_tracker_msgs::image_rect_ranged rect_ranged;
			//reference the tracker instead of copying the whole object each iteration
			LkTracker& tracker_tmp = *obj_trackers_[i];
			rect_ranged.rect.x = tracker_tmp.GetTrackedObject().rect.x;
			rect_ranged.rect.y = tracker_tmp.GetTrackedObject().rect.y;
			rect_ranged.rect.width = tracker_tmp.GetTrackedObject().rect.width;
			rect_ranged.rect.height = tracker_tmp.GetTrackedObject().rect.height;
			rect_ranged.rect.score = tracker_tmp.GetTrackedObject().score;
			rect_ranged.max_height = tracker_tmp.max_height_;
			rect_ranged.min_height = tracker_tmp.min_height_;
			rect_ranged.range = tracker_tmp.range_;

			rect_ranged_array.push_back(rect_ranged);

			lifespan[i] = tracker_tmp.GetRemainingLifespan();
			obj_id[i] = tracker_tmp.object_id;
			//a tracker updated by a detection this frame has a freshly reset
			//lifespan, which flags its rectangle as real (detected) data
			if (lifespan[i] == tracker_tmp.DEFAULT_LIFESPAN_)
				real_data[i] = 1;

			cv::rectangle(image_track, tracker_tmp.GetTrackedObject().rect, cv::Scalar(0,255,0), 2);
		}

		obj_detections_.clear();//detections for this frame are consumed

		cv_tracker_msgs::image_obj_tracked ros_objects_msg;
		ros_objects_msg.type = tracked_type_;
		ros_objects_msg.total_num = num;
		copy(rect_ranged_array.begin(), rect_ranged_array.end(), back_inserter(ros_objects_msg.rect_ranged)); // copy vector
		copy(real_data.begin(), real_data.end(), back_inserter(ros_objects_msg.real_data)); // copy vector
		copy(obj_id.begin(), obj_id.end(), back_inserter(ros_objects_msg.obj_id)); // copy vector
		copy(lifespan.begin(), lifespan.end(), back_inserter(ros_objects_msg.lifespan)); // copy vector

		ros_objects_msg.header = image_source.header;

		publisher_tracked_objects_.publish(ros_objects_msg);

		ready_ = false;

	}