/* Register the gray model family with babl by running each local
 * registration helper exactly once.  Order presumably matters
 * (components before models before conversions/formats) — kept as-is.
 */
void
babl_base_model_gray (void)
{
  components ();
  models ();
  conversions ();
  formats ();
}
/* Module initialisation entry point: runs every local registration
 * helper once.
 *
 * Returns 0 unconditionally (success).
 */
int
init (void)
{
  types ();
  components ();
  models ();
  formats ();
  conversions ();
  return 0;
}
void babl_fish_stats (FILE *file) { output_file = file; table_sum_processings_calc (); fprintf (output_file, "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n" "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Strict//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\">\n" "<html>\n" "<head>\n" "<title>BablFishPath introspection</title>\n" "<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\">\n" "<style type='text/css'>" " body {" " font-family: sans;" " margin-left: 1em;" " }" " .cell {" " overflow : none;" " height: 1em;" " font-family: monospace;" " border: 1px solid #eee;" " padding: 0;" " margin : 0;" "}" ".cell>a {" " text-decoration: none;" " color: black;" " cursor: help;" "}" "div.tooltip {" " border: 0.2em solid black;" " padding-top: 1em;" " padding-right: 2em;" " display: none;" " padding-left: 2em;" " padding-bottom: 3em;" " background-color: white;" " background-repeat: no-repeat;" " background-image: url(graphics/babl-48x48.png);" " background-position: bottom right;" " color: black;" "}" " .cell>a:hover {" " background-color: black;" " color: white;" "}" " .format_name {" " height: 1em;" " background-color: #eee;" " padding-right: 0.5em;" " padding-left: 0.5em;" " border-bottom: 1px solid #fff;" "}" " .format_name>a {" " text-decoration: none;" " color: blue;" " cursor: help;" " }" " .format_name>a:hover {" " background-color: blue;" " color: white;" " }" "a:hover>div.tooltip {" " display: block;" " position: fixed;" " bottom: 0;" " right: 0;" "}" "td.component {" " background-color: #060;" " padding-left: 0.5em;" " padding-top: 0.1em;" " padding-bottom: 0.1em;" " overflow: hidden;" " width: 4em;" " color: white;" " border: 1px solid white;" "}" "td.type {" " background-color: #006;" " padding-left: 0.5em;" " padding-top: 0.1em;" " padding-bottom: 0.1em;" " overflow: hidden;" " width: 4em;" " color: white;" " border: 1px solid white;" "}" ".g {" " color: gray;" "}" ".r {" " text-align: right;" "}" "</style>" "<script 
type='text/javascript'>" "var tick_count=0;" "function o ()" "{" " tick_count++;" " if (tick_count == 11)" " alert(\"«The mind is it's own place,\\nand in itself can make a heaven of hell;\\na hell of heaven.»\\n--Milton\");" " else if (tick_count == 42)" " alert(\"«So long and thanks for all the fish.»\\n--Adams\");" "}" "</script>" "</head>\n"); fprintf (output_file, "<body>\n"); fprintf (output_file, "<h1>BablFishPath introspection</h1>"); fprintf (output_file, "<p>Instrumentation and pathlengths.</p>"); fprintf (output_file, "<table cellspacing='0'><tr><td>Source format</td><td colspan='32'>Destination formats</td></tr>\n"); babl_format_class_for_each (table_source_each, NULL); fprintf (output_file, "</table>"); fprintf (output_file, "<div style='height:20em'></div>\n"); conversions (); fprintf (output_file, "</body></html>\n"); }
pcl::PointCloud<briskDepth> depthBrisk(const sensor_msgs::ImageConstPtr& msg, const sensor_msgs::PointCloud2ConstPtr &depth) { pcl::PointCloud<briskDepth> depthFeatures; cv::Mat image(conversions(msg)); briskStruct briskObj; if(msg->encoding != "bgr8" ) { ROS_ERROR("Unsupported image encoding:"); return depthFeatures; // Return Null image } // Check image size big enough cv::Size s = image.size(); if(s.height < 1)return depthFeatures; // Convert sensor message pcl::PointCloud< pcl::PointXYZ > depthPoints; pcl::fromROSMsg(*depth,depthPoints); // Assigning stable BRISK constants int Thresh = 30; int Octave = 3; float PatternScales=1.0f; // Detect the keypoints using traditional BRISK Detector cv::BRISK briskDetector(Thresh, Octave,PatternScales); briskDetector.create("Feature2D.BRISK"); briskDetector.detect(image, briskObj.keypoints); // Extract Features briskDetector.compute(image, briskObj.keypoints, briskObj.descriptors); s = briskObj.descriptors.size(); if(s.height < 1)return depthFeatures; // Start Conversion to 3D for(int i = 0; i < s.height; i++) { int x = round(briskObj.keypoints[i].pt.x); int y = round(briskObj.keypoints[i].pt.y); // only permit featrues where range can be extracted if(!isnan(depthPoints.points[depthPoints.width*y+x].x) && !isnan(depthPoints.points[depthPoints.width*y+x].x) && !isnan(depthPoints.points[depthPoints.width*y+x].x)) { briskDepth temp; temp.x = depthPoints.points[depthPoints.width*y+x].x; temp.y = depthPoints.points[depthPoints.width*y+x].y; temp.z = depthPoints.points[depthPoints.width*y+x].z; temp.descriptor = briskObj.descriptors.row(i); depthFeatures.push_back(temp); } } BVisualiser.visualise(briskObj.keypoints, image); cv::waitKey(10); if(count == 0) { //brisk_lastKeypoints = brisk_currentKeypoints; //brisk_lastImg = brisk_currentImg; brisk_lastKeypoints = briskObj.keypoints; brisk_lastImg = image; } brisk_currentKeypoints = briskObj.keypoints; brisk_currentImg = image; count++; return depthFeatures; }