//----------------------------------------------------- void ofxSURFTracker::draw() { // Crosshairs ofNoFill(); ofSetColor(255, 255, 0); ofSetLineWidth(1); int d = 10; // length of croshairs ofDrawLine(0, 0, 0 + d, 0); ofDrawLine(0, 0, 0, 0 + d); ofDrawLine(width, 0, width - d, 0); ofDrawLine(width, 0, width, 0 + d); ofDrawLine(width, height, width - d, height); ofDrawLine(width, height, width, height - d); ofDrawLine(0, height, 0 + d, height); ofDrawLine(0, height, 0, height - d); if(bDrawImage) { ofSetColor(255); trackImg.draw(0,0); } if(bDrawResponses) { drawResponses(); } if(bDrawMatches) { drawMatches(); } if(bDrawFeatures) { drawFeatures(); } if(bDrawHomography) { drawHomoGraphy(); } }
// Callback for incoming stereo feature matches: draws the current (lc) and
// previous (lp) feature locations onto the matching left camera image,
// publishes that annotated image, and overlays it on the disparity map.
void FeatureVisualization::featuresCallback(const feature_msgs::stereo_matchesConstPtr feature_locations_msg_) {
    // NOTE(review): dropped the unused right_img / right_output_img locals.
    cv::Mat left_img, left_output_img;

    // Find the buffered left image that corresponds to this feature message.
    sensor_msgs::Image* img_ptr =
        left_image_buffer_.retrieveImageFromCamInfo(feature_locations_msg_->l_camera_info);
    if (img_ptr != nullptr) {
        left_img = convertSensorMsgToCV(*img_ptr);
        drawFeatures(left_img, feature_locations_msg_->lc, feature_locations_msg_->lp, left_output_img);
        left_image_publisher_.publish(convertCVToSensorMsg(left_output_img));
    }

    stereo_msgs::DisparityImage* disp_ptr =
        disparity_buffer_.retrieveImageFromCamInfo(feature_locations_msg_->l_camera_info);
    // Calculation of disparity sometimes fails; therefore just take the most
    // recent one in the buffer when none is found (visualization only).
    if (disp_ptr == nullptr) {
        disp_ptr = disparity_buffer_.getLastImage();
    }
    // Only overlay when a feature image was actually produced above, so
    // left_output_img is valid.
    if ((disp_ptr != nullptr) && (img_ptr != nullptr)) {
        overlayAndPublishDisparity(*disp_ptr, left_output_img);
    }
}
int main(int argc, char** argv) { init(argc, argv); if (argc != 3) { google::ShowUsageWithFlags(argv[0]); return 1; } std::string tracks_file = argv[1]; std::string image_format = argv[2]; std::string output_format = FLAGS_output_format; bool ok; TrackList<DrawerPointer> tracks; if (FLAGS_similarity) { // Load tracks. TrackList<SiftPosition> sift_tracks; SiftPositionReader feature_reader; ok = loadTrackList(tracks_file, sift_tracks, feature_reader); CHECK(ok) << "Could not load tracks"; // Convert SIFT features to generic drawable features. siftPositionTracksToDrawers(sift_tracks, tracks); } else if (FLAGS_scale) { // Load tracks. TrackList<ScaleSpacePosition> scale_tracks; ScaleSpacePositionReader feature_reader; ok = loadTrackList(tracks_file, scale_tracks, feature_reader); CHECK(ok) << "Could not load tracks"; // Convert SIFT features to generic drawable features. scaleFeatureTracksToDrawers(scale_tracks, tracks, FLAGS_radius); } else { // Load tracks. TrackList<cv::Point2d> point_tracks; ImagePointReader<double> feature_reader; ok = loadTrackList(tracks_file, point_tracks, feature_reader); CHECK(ok) << "Could not load tracks"; // Convert SIFT features to generic drawable features. translationTracksToDrawers(point_tracks, tracks, FLAGS_radius); } LOG(INFO) << "Loaded " << tracks.size() << " tracks"; // Make a list of random colors. typedef std::vector<cv::Scalar> ColorList; ColorList colors; for (int i = 0; i < int(tracks.size()); i += 1) { colors.push_back(randomColor(BRIGHTNESS, SATURATION)); } // Iterate through frames in which track was observed. TrackListTimeIterator<DrawerPointer> frame(tracks, 0); while (!frame.end()) { // Get the current time. int t = frame.t(); // Load the image. cv::Mat color_image; cv::Mat gray_image; ok = readImage(makeFilename(image_format, t), color_image, gray_image); CHECK(ok) << "Could not read image"; // Get the features. 
typedef std::map<int, DrawerPointer> FeatureSet; FeatureSet features; frame.getPoints(features); // Draw each one with its color. drawFeatures(color_image, features, colors); if (FLAGS_save) { std::string output_file = makeFilename(output_format, t); ok = cv::imwrite(output_file, color_image); CHECK(ok) << "Could not save image"; } if (FLAGS_display) { cv::imshow("tracks", color_image); cv::waitKey(10); } ++frame; } return 0; }