示例#1
0
// Main capture/processing loop: creates the capture and threshold windows
// plus two trackbars, then grabs/labels/processes frames until f_loop is
// cleared elsewhere.  Blocks the calling thread for the whole session.
void OpenCVAdapter::Loop(){
	f_loop=true;
	// One window for the raw capture, one for the thresholded view.
	cvNamedWindow (windowNameCap.c_str(), CV_WINDOW_AUTOSIZE);
	cvNamedWindow (windowNameThr.c_str(), CV_WINDOW_AUTOSIZE);
	HWND hWndCap=(HWND)cvGetWindowHandle(windowNameCap.c_str());
	HWND hWndThr=(HWND)cvGetWindowHandle(windowNameThr.c_str());
	// Both trackbars write directly into the bound member variables.
	cvCreateTrackbar(trackbarName.c_str(),windowNameThr.c_str(),&trackbarPosition,100,NULL);
	cvCreateTrackbar(trackbarName2.c_str(),windowNameThr.c_str(),&trackbarPosition2,200,NULL);

	// Scratch buffers for the labelling stage, freed again below.
	label = new unsigned char[xnum*ynum];
	labeled = new short[xnum*ynum];

	int fc=0;

	for(;f_loop;){
		if(f_start){
			// Acquire, label and post-process one frame, then display it.
			GetFrame();
			LabelFrame();
			ProcFrame();
			cvShowImage (windowNameThr.c_str(), frame);
		}
		// Pump the HighGUI event queue and pace the loop to ~fps frames/s.
		// NOTE(review): divides by fps — confirm fps can never be 0 here.
		cvWaitKey (1000/fps);

	}
	SAFE_DELETE_ARRAY(label);
	SAFE_DELETE_ARRAY(labeled);

	cvDestroyWindow (windowNameCap.c_str());
	cvDestroyWindow (windowNameThr.c_str());
}
示例#2
0
	/// Destructor.
	/// Releases only the locally owned 8-bit display copies; the 32F source
	/// images belong to the Cv_Bridge object and must not be freed here.
        ~CobTofCameraViewerNode()
        {
		/// Do not release <code>m_GrayImage32F3</code>
		/// Do not release <code>xyz_image_32F3_</code>
		/// Image allocation is managed by Cv_Bridge object 
			if (xyz_image_8U3_) cvReleaseImage(&xyz_image_8U3_);
			if (grey_image_8U3_) cvReleaseImage(&grey_image_8U3_);

			/// Destroy the display windows only if they still exist
			/// (the user may already have closed them with the close button).
			if(cvGetWindowHandle("z data"))cvDestroyWindow("z data");
			if(cvGetWindowHandle("grey data"))cvDestroyWindow("grey data");
        }
/// Get image from the color cam and show it.
/// Loops until the "ColorCamera" window is closed or 'q' is pressed.
/// @return Return code
unsigned long ShowColorImage()
{
	IplImage* ColorImage;

	cvNamedWindow("ColorCamera");
	// Closing the window with the mouse makes the handle NULL, ending the loop.
	while (cvGetWindowHandle("ColorCamera"))
	{
		if(cvWaitKey(10)=='q') break;

		/// Uncomment when using <code>GetColorImage</code> instead of <code>GetColorImage2</code>
	    //ColorImage = cvCreateImage(cvSize(1388,1038),IPL_DEPTH_8U,3);
		if (colorCamera->GetColorImage2(&ColorImage) == libCameraSensors::RET_FAILED)
		//if (colorCamera->GetColorImage(ColorImage, true) == libCameraSensors::RET_FAILED)
		{
			std::cerr << "TestCameraSensors: Color image acquisition failed\n";
			getchar();
			return ipa_utils::RET_FAILED;
		}

		cvShowImage("ColorCamera", ColorImage);

		// NOTE(review): assumes GetColorImage2 allocates a fresh image that
		// the caller owns — confirm against the camera API before changing.
		cvReleaseImage(&ColorImage);
	}
	return ipa_utils::RET_OK;
}
示例#4
0
// Render one depth frame as a pseudocolored image with a timestamp and a
// progress overlay, and show it in the "Caremedia Kinect Viewer" window.
// Returns 0 when the window has been closed by the user, otherwise the
// key pressed during display (-1 when no key was pressed).
int OpencvModule::DrawDepth(DepthMetaData& g_depthMD){

    // If the user closed the viewer window, report it once (return 0) and
    // re-arm the flag so a reopened window resumes drawing.
    if (!cvGetWindowHandle("Caremedia Kinect Viewer"))  // if(window has been closed)
    {
        if (windowopened) {windowopened=false; return 0; }
        else windowopened = true;
    }

    int key=0;
    //for opencv Mat, accessing buffer
    Mat depth16(480,640,CV_16UC1,(unsigned short*)g_depthMD.WritableData());
    // Map 16-bit depth (0..4096) onto an inverted 8-bit ramp, then colorize.
    depth16.convertTo(depth8,CV_8U,-255/4096.0,255);
    Pseudocolor->pseudocolor(depth8,rgbdepth);

    // Timestamp arrives in microseconds; convert to seconds.
    float aux=((float)g_depthMD.Timestamp())/1E6;
    QVariant time_double(aux);

    // BUGFIX: the original wrote (int)(aux - (int)aux) * 1000, which
    // truncated the fractional part to 0 BEFORE multiplying, so the
    // millisecond component was always 0.  Cast after the multiply.
    QTime t = videostarttime.addSecs((int)aux).addMSecs((int)((aux - (int)aux) * 1000));

    float percent = (float)100*(float)g_depthMD.FrameID() / (float)NumFrames;

    QString a;

    putText(rgbdepth,"Time:"+t.toString().toStdString(), cvPoint(460,30),5,1,cvScalar(255, 255, 255, 0),1,1);

    putText(rgbdepth, a.setNum(percent,'f',2).append("%").toStdString(), cvPoint(5,30),6,0.6,cvScalar(255, 255, 255, 0),1,1);

    imshow("Caremedia Kinect Viewer",rgbdepth);

    key = waitKey(5);

    // BUGFIX: the function is declared int but previously fell off the end
    // without returning (undefined behaviour).  Return the pressed key so
    // the window-closed path's 0 stays distinguishable.
    return key;
}
示例#5
0
/**
* @function main
*/
int main( int argc, const char** argv )
{
	CvCapture* capture;
	IplImage* frame = 0;
	HWND hwnd;
	do{
		//Read the video stream
		capture = cvCaptureFromCAM(0);
		frame = cvQueryFrame( capture );

		// create a window to display detected faces
		cvNamedWindow("Mine's Player", CV_WINDOW_AUTOSIZE);

		// display face detections
		cvShowImage("Mine's Player", frame);

		hwnd = (HWND)cvGetWindowHandle("Mine's Player");
		if( cvWaitKey(1) == 27 ) break;
	}while(IsWindowVisible(hwnd));

	// clean up and release resources
	cvReleaseImage(&frame);

	return 0;
}
/// Get range image, convert and show it.
/// Loops until the "Range image" window is closed or 'q' is pressed.
/// @return Return code
unsigned long ShowRangeImage()
{
	IplImage* RangeImage = cvCreateImage(cvSize(176, 144), IPL_DEPTH_32F, 1);
	/// BUGFIX: allocate the 8U3 display image once, outside the loop.  The
	/// original created a fresh OutputImage every iteration and never
	/// released it, leaking memory for as long as the window stayed open.
	IplImage* OutputImage = cvCreateImage(cvSize(RangeImage->width, RangeImage->height), IPL_DEPTH_8U, 3);

	cvNamedWindow("Range image");

	unsigned long ret = ipa_utils::RET_OK;
	while(cvGetWindowHandle("Range image"))
	{
		if(cvWaitKey(10)=='q') break;

		/// Get image
		if(rangeImagingSensor->AcquireImages(RangeImage) & ipa_utils::RET_FAILED)
		{
			std::cerr << "ShowRangeImage: Range image acquisition failed." << std::endl;
			ret = ipa_utils::RET_FAILED;
			break;
		}

		/// Process image
		ConvertToShowImage(RangeImage, OutputImage);

		/// Show image
		cvShowImage("Range image", OutputImage);
	}
	/// BUGFIX: release both images on every exit path (the original leaked
	/// RangeImage when acquisition failed).
	cvReleaseImage(&OutputImage);
	cvReleaseImage(&RangeImage);

	return ret;
}
// ROS subscriber callback: render the received traffic-light state as a
// filled circle (the lamp) plus a text label in the status window.
static void signalState_cb(const autoware_msgs::TrafficLight::ConstPtr& msg)
{
  const int   fontFace      = cv::FONT_HERSHEY_COMPLEX;
  const float fontScale     = 1.0f;
  const int   fontThickness = 1;
  int         baseLine      = 0;
  cv::Point   textOrg;
  std::string label;
  cv::Scalar  textColor;
  cv::Scalar  signalColor;

  // Pick label text and colors from the reported state; anything other
  // than RED or GREEN renders as "NO SIGNAL DETECTED" with a black lamp.
  switch (msg->traffic_light) {
  case TRAFFIC_LIGHT_RED:
    label       = "RED";
    textColor   = CV_RGB(255, 0, 0);
    signalColor = CV_RGB(255, 0, 0);
    break;
  case TRAFFIC_LIGHT_GREEN:
    label       = "GREEN";
    textColor   = CV_RGB(0, 255, 0);
    signalColor = CV_RGB(0, 255, 0);
    break;
  default:
    label       = "NO SIGNAL DETECTED";
    textColor   = CV_RGB(255, 255, 255);
    signalColor = CV_RGB(0, 0, 0);
    break;
  }

  // Square black canvas with the lamp drawn as a filled circle at its centre.
  cv::Mat result(WINDOW_SIZE, WINDOW_SIZE, CV_8UC3, cv::Scalar(0));

  cv::circle(result, cv::Point(WINDOW_SIZE/2, WINDOW_SIZE/2), RADIUS, signalColor, CV_FILLED);

  // Measure the label so it can be anchored fully inside the canvas.
  cv::Size textSize = cv::getTextSize(label,
                                      fontFace,
                                      fontScale,
                                      fontThickness,
                                      &baseLine);

  // Top-left corner, shifted down by the text's own height + baseline.
  textOrg = cv::Point(0, textSize.height + baseLine);

  cv::putText(result,
              label,
              textOrg,
              fontFace,
              fontScale,
              textColor,
              fontThickness,
              CV_AA);

  if (cvGetWindowHandle(WINDOW_NAME) != NULL) // Guard not to write destroyed window by using close button on the window
    {
      cv::imshow(WINDOW_NAME, result);
      cv::waitKey(5);
    }


}
示例#8
0
// Embed the named HighGUI window inside the host window pWnd and size
// both to w x h.  Windows-only (uses raw Win32 re-parenting).
void EmbedCvWindow( HWND pWnd, CString strWndName, int w, int h )
{
	cvNamedWindow(strWndName, 0);
	// HighGUI wraps the view in its own frame window: re-parent the view
	// into pWnd, then hide the now-empty original frame.
	HWND hWnd = (HWND) cvGetWindowHandle(strWndName);
	HWND hParent = ::GetParent(hWnd);
	::SetParent(hWnd, pWnd); // embed into the pWnd window
	::ShowWindow(hParent, SW_HIDE);
	::SetWindowPos(pWnd, NULL, 0,0, w,h, SWP_NOMOVE | SWP_NOZORDER);
	cvResizeWindow(strWndName, w,h);
}
示例#9
0
// Display the current feature-matching visualisation (Windows builds only)
// and force the HighGUI window to repaint immediately.
tVoid cRoadSigns::drawMatchesOfSign()
{
#if defined(WIN32)
	if (!m_state)
		return;

	cv::imshow("matching result", m_img_matches);

	// Push a WM_PAINT at the native window so the new image shows up
	// without waiting for the next event-loop pass.
	HWND matchWnd = (HWND)cvGetWindowHandle("matching result");
	::SendMessage(matchWnd, WM_PAINT, 0, 0);
#endif
}
/** Retrieves the window dimensions of an opencv window.
 * @param winname The window's name as a c string.
 * @return A pair of that stores first the window's height and second the window's width in pixels.
 *         Returns undefined values in case the window with the given name could not be handled.
 */
std::pair<unsigned int, unsigned int> window_dimensions( const char* winname) {
    std::pair<unsigned int /*height*/, unsigned int /*width*/> ret;
    HWND hwnd = (HWND)cvGetWindowHandle( winname);
    assert( hwnd != 0 && "window handle must retrieve a valid window!");
    
    RECT rect;
	GetClientRect(hwnd, &rect);

    ret.first = rect.bottom - rect.top;
    ret.second = rect.right - rect.left;
    return ret;
}
示例#11
0
// Nodelet entry point: parse transport/window options, create the display
// window, hook the close handler, and subscribe to the image topic.
void ImageNodelet::onInit()
{
  ros::NodeHandle nh = getNodeHandle();
  ros::NodeHandle local_nh = getPrivateNodeHandle();

  // Command line argument parsing
  const std::vector<std::string>& argv = getMyArgv();
  // First positional argument is the transport type
  std::string transport;
  local_nh.param("image_transport", transport, std::string("raw"));
  for (int i = 0; i < (int)argv.size(); ++i)
  {
    // First token that is not a "-" option wins.
    if (argv[i][0] != '-')
    {
      transport = argv[i];
      break;
    }
  }
  NODELET_INFO_STREAM("Using transport \"" << transport << "\"");
  // Internal option, should be used only by the image_view node
  bool shutdown_on_close = std::find(argv.begin(), argv.end(),
                                     "--shutdown-on-close") != argv.end();

  // Default window name is the resolved topic name
  std::string topic = nh.resolveName("image");
  local_nh.param("window_name", window_name_, topic);

  bool autosize;
  local_nh.param("autosize", autosize, false);
  
  // Format used when saving frames to disk (e.g. on mouse click).
  std::string format_string;
  local_nh.param("filename_format", format_string, std::string("frame%04i.jpg"));
  filename_format_.parse(format_string);

  cv::namedWindow(window_name_, autosize ? cv::WND_PROP_AUTOSIZE : 0);
  cv::setMouseCallback(window_name_, &ImageNodelet::mouseCb, this);
  
#ifdef HAVE_GTK
  // Register appropriate handler for when user closes the display window
  GtkWidget *widget = GTK_WIDGET( cvGetWindowHandle(window_name_.c_str()) );
  if (shutdown_on_close)
    g_signal_connect(widget, "destroy", G_CALLBACK(destroyNode), NULL);
  else
    g_signal_connect(widget, "destroy", G_CALLBACK(destroyNodelet), &sub_);
#endif

  // Start the OpenCV window thread so we don't have to waitKey() somewhere
  startWindowThread();

  image_transport::ImageTransport it(nh);
  image_transport::TransportHints hints(transport, ros::TransportHints(), getPrivateNodeHandle());
  sub_ = it.subscribe(topic, 1, &ImageNodelet::imageCb, this, hints);
}
示例#12
0
// Capture live image stream (e.g., for adjusting object placement).
// Projects a white field, shows gain-adjusted camera frames in "camWindow",
// and exits on the first key press in either window.  Returns 0.
int camPreview(CvCapture* capture, struct slParams* sl_params, struct slCalib* sl_calib){

	// Create a window to display captured frames.
	IplImage* cam_frame  = cvQueryFrame2(capture, sl_params);
	IplImage* proj_frame = cvCreateImage(cvSize(sl_params->proj_w, sl_params->proj_h), IPL_DEPTH_8U, 1);
	cvNamedWindow("camWindow", CV_WINDOW_AUTOSIZE);
	// Trackbars write straight into the gain fields of sl_params.
	cvCreateTrackbar("Cam. Gain",  "camWindow", &sl_params->cam_gain,  100, NULL);
	cvCreateTrackbar("Proj. Gain", "camWindow", &sl_params->proj_gain, 100, NULL);
	HWND camWindow = (HWND)cvGetWindowHandle("camWindow");
	BringWindowToTop(camWindow);
	cvWaitKey(1);

	// Capture live image stream.
	int cvKey = -1, cvKey_temp = -1;
	while(1){

		// Project white image (scaled by the projector gain trackbar).
		cvSet(proj_frame, cvScalar(255));
		cvScale(proj_frame, proj_frame, 2.*(sl_params->proj_gain/100.), 0);
		cvShowImage("projWindow", proj_frame);
		cvKey_temp = cvWaitKey(1);
		if(cvKey_temp != -1) 
			cvKey = cvKey_temp;

		// Capture next frame and update display window.
		cam_frame = cvQueryFrame2(capture, sl_params);
		cvScale(cam_frame, cam_frame, 2.*(sl_params->cam_gain/100.), 0);
		cvShowImageResampled("camWindow", cam_frame, sl_params->window_w, sl_params->window_h);
		cvKey_temp = cvWaitKey(10);
		if(cvKey_temp != -1) 
			cvKey = cvKey_temp;
		
		// Exit on user interaction.
		if(cvKey != -1)
			break;
	}

	// Project black image (turn the projector "off" before leaving).
	cvZero(proj_frame);
	cvShowImage("projWindow", proj_frame);
	cvKey_temp = cvWaitKey(1);

	// Return without errors.
	cvDestroyWindow("camWindow");
	cvReleaseImage(&proj_frame);
	return 0;
}
示例#13
0
// Overlay lidar distance points onto the latest camera image and display
// the result.  No-op until both an image and a point set have arrived.
static void show(void)
{
	if(!existImage || !existPoints){
		return;
	}
	const auto& encoding = sensor_msgs::image_encodings::BGR8;
	cv_bridge::CvImagePtr cv_image = cv_bridge::toCvCopy(image_msg, encoding);
	IplImage frame = cv_image->image;

	cv::Mat matImage(&frame, false);

	int w = matImage.size().width;
	int h = matImage.size().height;
	int n = w * h;

	// Find the min/max distance over the whole frame to normalize colors.
	float min_d, max_d;
	min_d = max_d = points_msg->distance[0];
	for(int i=1; i<n; i++){
		float di = points_msg->distance[i];
		max_d = di > max_d ? di : max_d;
		min_d = di < min_d ? di : min_d;
	}
	float wid_d = max_d - min_d;

	for(int y=0; y<h; y++){
		for(int x=0; x<w; x++){
			int j = y * w + x;
			float distance = points_msg->distance[j];
			// Distance 0 means "no lidar return" for this pixel.
			if(distance == 0){
				continue;
			}
			// Map distance linearly to a colormap index; mid-scale when
			// the frame has no distance spread.
			int colorid= wid_d ? ( (distance - min_d) * 255 / wid_d ) : 128;
			cv::Vec3b color=colormap.at<cv::Vec3b>(colorid);
			int g = color[1];
			int b = color[2];
			int r = color[0];
			// Draw each lidar return as a 1x1 filled rectangle.
			cvRectangle(&frame, cvPoint(x, y), cvPoint(x+1, y+1), CV_RGB(r, g, b));
		}
	}

	if (cvGetWindowHandle(window_name) != NULL) // Guard not to write destroyed window by using close button on the window
	{
		cvShowImage(window_name, &frame);
		cvWaitKey(2);
	}
}
  // Construct the viewer: read window options, create the display window,
  // register a close handler (GTK builds), and subscribe to /preview.
  RemotePtamm(const ros::NodeHandle& nh, const std::string& _transport)
  {
    topic_ = "/preview";
    ros::NodeHandle local_nh("~");
    // Window title defaults to the topic name unless overridden by param.
    local_nh.param("window_name", window_name_, topic_);

    transport_ = _transport;

    bool autosize;
    local_nh.param("autosize", autosize, false);
    cv::namedWindow(window_name_, autosize ? 1 : 0);

#ifdef HAVE_GTK
    // Register appropriate handler for when user closes the display window
    GtkWidget *widget = GTK_WIDGET( cvGetWindowHandle(window_name_.c_str()) );
    g_signal_connect(widget, "destroy", G_CALLBACK(destroyNode), NULL);
#endif

    sub_ = NULL;
    subscribe(nh);
  }
示例#15
0
// HighGUI mouse callback: shows the current cursor coordinates in the
// window's title bar.
void mouseEvent(int evt, int x, int y, int flags, void* param){
	// BUGFIX: the original overwrote the global windowName with the
	// coordinate text, so on the next event cvGetWindowHandle looked up a
	// window named "x: ..., y: ..." and returned NULL.  Keep windowName
	// intact and build the title in a local string instead.
	HWND hw= (HWND)cvGetWindowHandle(windowName.c_str());
	if (hw == NULL)
		return; // window gone (closed by the user) — nothing to update
	std::string title = "x: "+ StringConverter::toString(x) + " , y: " + StringConverter::toString(y) ;
	// NOTE(review): the LPCTSTR cast is only valid in ANSI builds — confirm
	// the project is not built with UNICODE.
	SetWindowText(hw,(LPCTSTR)title.c_str());
}
示例#16
0
static void showImage()
{
    IplImage* image_clone = cvCloneImage(image);
    char distance_string[32];
    CvFont dfont;
    float hscale      = 0.7f;
    float vscale      = 0.7f;
    float italicscale = 0.0f;
    int  thickness    = 1;

    std::string objectLabel;
    CvFont      dfont_label;
    float       hscale_label = 0.5f;
    float       vscale_label = 0.5f;
    CvSize      text_size;
    int         baseline     = 0;

    cvInitFont(&dfont_label, CV_FONT_HERSHEY_COMPLEX, hscale_label, vscale_label, italicscale, thickness, CV_AA);
    objectLabel = car_fused_objects.type;
    cvGetTextSize(objectLabel.data(),
                  &dfont_label,
                  &text_size,
                  &baseline);

    /*
     * Plot obstacle frame
     */
    showRects(image_clone,
              car_fused_objects.obj,
              ratio,
              cvScalar(255.0,255.0,0.0));
    showRects(image_clone,
              pedestrian_fused_objects.obj,
              ratio,
              cvScalar(0.0,255.0,0.0));


    /*
     * Plot car distance data on image
     */
    for (unsigned int i = 0; i < car_fused_objects.obj.size(); i++) {
      if(!isNearlyNODATA(car_fused_objects.obj.at(i).range)) {
          int rect_x      = car_fused_objects.obj.at(i).rect.x;
          int rect_y      = car_fused_objects.obj.at(i).rect.y;
          int rect_width  = car_fused_objects.obj.at(i).rect.width;
          int rect_height = car_fused_objects.obj.at(i).rect.height;
          float range     = car_fused_objects.obj.at(i).range;

          /* put label */
          CvPoint labelOrg = cvPoint(rect_x - OBJ_RECT_THICKNESS,
                                     rect_y - baseline - OBJ_RECT_THICKNESS);
          cvRectangle(image_clone,
                      cvPoint(labelOrg.x + 0, labelOrg.y + baseline),
                      cvPoint(labelOrg.x + text_size.width, labelOrg.y - text_size.height),
                      CV_RGB(0, 0, 0), // label background color is black
                      -1, 8, 0
                      );
          cvPutText(image_clone,
                    objectLabel.data(),
                    labelOrg,
                    &dfont_label,
                    CV_RGB(255, 255, 255) // label text color is white
                    );

          /* put distance data */
            cvRectangle(image_clone,
                        cv::Point(rect_x + (rect_width/2) - (((int)log10(range/100)+1) * 5 + 45),
                                  rect_y + rect_height + 5),
                        cv::Point(rect_x + (rect_width/2) + (((int)log10(range/100)+1) * 8 + 38),
                                  rect_y + rect_height + 30),
                        cv::Scalar(255,255,255), -1);
            cvInitFont (&dfont, CV_FONT_HERSHEY_COMPLEX , hscale, vscale, italicscale, thickness, CV_AA);
            sprintf(distance_string, "%.2f m", range / 100); //unit of length is meter
            cvPutText(image_clone,
                      distance_string,
                      cvPoint(rect_x + (rect_width/2) - (((int)log10(range/100)+1) * 5 + 40),
                              rect_y + rect_height + 25),
                      &dfont,
                      CV_RGB(255, 0, 0));
        }
    }

    objectLabel = pedestrian_fused_objects.type;
    cvGetTextSize(objectLabel.data(),
                  &dfont_label,
                  &text_size,
                  &baseline);

    /*
     * Plot pedestrian distance data on image
     */
    for (unsigned int i = 0; i < pedestrian_fused_objects.obj.size(); i++) {
      if(!isNearlyNODATA(pedestrian_fused_objects.obj.at(i).range)) {
          int rect_x      = pedestrian_fused_objects.obj.at(i).rect.x;
          int rect_y      = pedestrian_fused_objects.obj.at(i).rect.y;
          int rect_width  = pedestrian_fused_objects.obj.at(i).rect.width;
          int rect_height = pedestrian_fused_objects.obj.at(i).rect.height;
          float range     = pedestrian_fused_objects.obj.at(i).range;

          /* put label */
          CvPoint labelOrg = cvPoint(rect_x - OBJ_RECT_THICKNESS,
                                     rect_y - baseline - OBJ_RECT_THICKNESS);
          cvRectangle(image_clone,
                      cvPoint(labelOrg.x + 0, labelOrg.y + baseline),
                      cvPoint(labelOrg.x + text_size.width, labelOrg.y - text_size.height),
                      CV_RGB(0, 0, 0), // label background color is black
                      -1, 8, 0
                      );
          cvPutText(image_clone,
                    objectLabel.data(),
                    labelOrg,
                    &dfont_label,
                    CV_RGB(255, 255, 255) // label text color is white
                    );

          /* put distance data */
            cvRectangle(image_clone,
                        cv::Point(rect_x + (rect_width/2) - (((int)log10(range/100)+1) * 5 + 45),
                                  rect_y + rect_height + 5),
                        cv::Point(rect_x + (rect_width/2) + (((int)log10(range/100)+1) * 8 + 38),
                                  rect_y + rect_height + 30),
                        cv::Scalar(255,255,255), -1);
            cvInitFont (&dfont, CV_FONT_HERSHEY_COMPLEX , hscale, vscale, italicscale, thickness, CV_AA);
            sprintf(distance_string, "%.2f m", range / 100); //unit of length is meter
            cvPutText(image_clone,
                      distance_string,
                      cvPoint(rect_x + (rect_width/2) - (((int)log10(range/100)+1) * 5 + 40),
                              rect_y + rect_height + 25),
                      &dfont,
                      CV_RGB(255, 0, 0));
        }
    }

    /*
     * Show image
     */
    if (cvGetWindowHandle(window_name.c_str()) != NULL) // Guard not to write destroyed window by using close button on the window
      {
        cvShowImage(window_name.c_str(), image_clone);
        cvWaitKey(2);
      }
    cvReleaseImage(&image_clone);
}
示例#17
0
/*
Checks if a HighGUI window is still open or not

@param name the name of the window we're checking

@return Returns 1 if the window named \a name has been closed or 0 otherwise
*/
int win_closed( char* win_name )
{
	/* A NULL handle means HighGUI no longer knows this window. */
	return cvGetWindowHandle(win_name) ? 0 : 1;
}
示例#18
0
// Node entry point: subscribes to the configured camera topic and runs an
// interactive tracking loop ('i' initialises/resets the tracker, ESC or
// 'q' quits, closing the window also quits).
int main(int argc, char* argv[])
{
	ros::init(argc, argv, NODE_NAME);

	ros::NodeHandle nh(NODE_NAME);

	Config conf(nh);

	std::cout << conf << std::endl;

	if (conf.features.size() == 0)
	{
		std::cout << "Error: no valid features specified in configuration." << std::endl;
		ros::shutdown();
		return EXIT_FAILURE;
	}

	image_transport::ImageTransport it(nh);

	image_transport::Subscriber sub = it.subscribe(conf.camera_topic, 1, imageCallback);

	frameWidth = conf.frameWidth;
	frameHeight = conf.frameHeight;

	// Initial bounding box: a fixed-size box centred in the frame.
	FloatRect initBB = IntRect(conf.frameWidth / 2 - kLiveBoxWidth / 2, conf.frameHeight / 2 - kLiveBoxHeight / 2, kLiveBoxWidth, kLiveBoxHeight);

	std::cout << "press 'i' to initialise tracker" << std::endl;

	if (!conf.quietMode)
	{
		cv::startWindowThread();
		cv::namedWindow("result");
	}

	Tracker tracker(conf);

	cv::Mat result = cv::Mat(conf.frameHeight, conf.frameWidth, CV_8UC3);

	int currentFrame = -1;

	bool doInitialise = false;

	while(ros::ok())
	{
		// Only re-run tracking when the subscriber delivered a new frame.
		if(currentFrame != frameNumber)
		{
			currentFrame = frameNumber;

			frame.copyTo(result);

			if(tracker.IsInitialised())
			{
				tracker.Track(frame);

				if (!conf.quietMode && conf.debugMode)
				{
					tracker.Debug();
				}

				// Green box: the tracker's current estimate.
				rectangle(result, tracker.GetBB(), CV_RGB(0, 255, 0));
			}

			// 'i' toggles: reset an initialised tracker, or initialise an
			// uninitialised one from the centred default box.
			if (doInitialise)
			{
				if(tracker.IsInitialised())
				{
					tracker.Reset();
				}
				else
				{
					tracker.Initialise(frame, initBB);
				}

				doInitialise = false;
			}
			else if(!tracker.IsInitialised())
			{
				// White box: where initialisation would place the tracker.
				rectangle(result, initBB, CV_RGB(255, 255, 255));
			}
		}

		// A NULL handle means the user closed the window: exit the loop.
		if(cvGetWindowHandle("result"))
		{
			cv::imshow("result", result);

			int key = cv::waitKey(1);
			if (key != -1)
			{
				if (key == 27 || key == 113) // esc q
				{
					break;
				}
				else if (key == 105)
				{
					doInitialise = true;
				}
			}
		}
		else
		{
			break;
		}

		ros::spinOnce();
	}

	cv::destroyAllWindows();
	ros::shutdown();

	return EXIT_SUCCESS;
}
// Overlay detection rectangles, distance labels and lidar points onto the
// latest camera image and display it.  No-op until both an image and a
// point set have arrived.
void show(void)
{
  if(!existImage || !existPoints){
    return;
  }
  const auto& encoding = sensor_msgs::image_encodings::BGR8;
  cv_bridge::CvImagePtr cv_image = cv_bridge::toCvCopy(image_msg, encoding);
  IplImage frame = cv_image->image;

  cv::Mat matImage = cv::cvarrToMat(&frame);//(&frame, false);

  /* DRAW RECTANGLES of detected objects */
#if 0
  for(std::size_t i=0; i<cars.size();i++) {
      if(cars[i].y > matImage.rows*.3) { //temporal way to avoid drawing detections in the sky
          cvRectangle( &frame,
                       cvPoint(cars[i].x, cars[i].y),
                       cvPoint(cars[i].x+cars[i].width, cars[i].y+cars[i].height),
                       _colors[0], 3, 8,0 );
    }
  }
  for(std::size_t i=0; i<peds.size();i++) {
    if(peds[i].y > matImage.rows*.3) {
      cvRectangle( &frame,
                   cvPoint(peds[i].x, peds[i].y),
                   cvPoint(peds[i].x+peds[i].width, peds[i].y+peds[i].height),
                   _colors[1], 3, 8,0 );
    }
  }
#else
  // Skip the top 10% of the image (drawRects' last arg is a y-threshold).
  drawRects(&frame,
            car_fused_objects.obj,
            cvScalar(255.0, 255.0, 0,0),
            matImage.rows*.10);

  drawRects(&frame,
            pedestrian_fused_objects.obj,
            cvScalar(0.0, 255.0, 0,0),
            matImage.rows*.10);
#endif
  /* PUT DISTANCE text on image */
  putDistance(&frame,
              car_fused_objects.obj,
              matImage.rows*.10,
              car_fused_objects.type.c_str());
  putDistance(&frame,
              pedestrian_fused_objects.obj,
              matImage.rows*.10,
              pedestrian_fused_objects.type.c_str());

  /* DRAW POINTS of lidar scanning */
  int w = matImage.size().width;
  int h = matImage.size().height;

  // Scan all points for the min/max distance, to normalize the colormap.
  int n = w * h;
  float min_d = 1<<16, max_d = -1;
  //	int min_i = 1<<8, max_i = -1;
  for(int i=0; i<n; i++){
    // NOTE(review): distance is stored into an int here, truncating any
    // fractional part — confirm the message's distance units make this safe.
    int di = points_msg->distance[i];
    max_d = di > max_d ? di : max_d;
    min_d = di < min_d ? di : min_d;
    // int it = points_msg->intensity[i];
    // max_i = it > max_i ? it : max_i;
    // min_i = it < min_i ? it : min_i;
  }
  float wid_d = max_d - min_d;

  for(int y=0; y<h; y++){
    for(int x=0; x<w; x++){
      int j = y * w + x;
      double distance = points_msg->distance[j];
      // Distance 0 means "no lidar return" for this pixel.
      if(distance == 0){
        continue;
      }
      // Map distance linearly onto the colormap; mid-scale if no spread.
      int colorid= wid_d ? ( (distance - min_d) * 255 / wid_d ) : 128;
      cv::Vec3b color=colormap.at<cv::Vec3b>(colorid);
      int g = color[1];
      int b = color[2];
      int r = color[0];
      // Each lidar return is drawn as a 1x1 filled rectangle.
      cvRectangle(&frame, cvPoint(x, y), cvPoint(x+1, y+1), CV_RGB(r, g, b));
    }
  }

  if (cvGetWindowHandle(window_name) != NULL) // Guard not to write destroyed window by using close button on the window
    {
      cvShowImage(window_name, &frame);
      cvWaitKey(2);
    }
}
示例#20
0
// Initialise the cvcam capture pipeline for camera index c (c == -1 pops
// the camera-selection dialog).  Creates the preview window, binds the
// frame-grab callback, and starts streaming.
// Returns false when no usable camera is selected, true on success.
bool InitCVCAM(int c)
{
	printf("CamTest started ..\n");
	int cameras = cvcamGetCamerasCount();
	printf("Cameras detected: %d \n",cameras);

	// Requested index must exist.
	if(c>=cameras)
		return false;
	int cameraSelected = -1;
	
	/*if(cameras>0)
		cameraSelected=0;*/
	if(c==-1)
	{
		// Let the user pick a camera from the cvcam dialog.
		int * out;
		int nselected = cvcamSelectCamera(&out);
		if(nselected>0)	cameraSelected = out[0];
	}
	else
		cameraSelected=c;

	if (cameraSelected > -1)
	{
		printf("The selected camera is camera number %d \n", cameraSelected);
		printf("Starting Camera %d \n",cameraSelected );
	// starting camera 1


	// Configure render size 320x240 and disable cvcam's own rendering
	// (t == 0); frames are handled by the grabframe callback instead.
	int h = 240;
	int w = 320;
	int t=0;
	cvcamSetProperty(cameraSelected,CVCAM_RNDWIDTH , &w);
	cvcamSetProperty(cameraSelected,CVCAM_RNDHEIGHT , &h);
	cvcamSetProperty(cameraSelected,CVCAM_PROP_ENABLE, &t);
	cvcamSetProperty(cameraSelected,CVCAM_PROP_RENDER, &t);
	//cvcamSetProperty(0,CVCAM_PROP_WINDOW, NULL);
	printf("It's working !!! \n");
	//Sleep(10000);
	//cvcamStop();
	//cvcamExit();
	//printf("Camera stopped. \n");

	}
	else 
	{
		printf("No Camera selected - terminating! \n");
		return false;
	}

	// Global frame buffer sized from the selected resolution table.
	camimg=cvCreateImage(cvSize(camresx[resid],camresy[resid]), IPL_DEPTH_8U, 3);
	

	// NOTE(review): the window is created with the literal "cvcam" but all
	// later calls use the wincvcam variable — confirm wincvcam == "cvcam",
	// otherwise cvGetWindowHandle returns NULL here.
	cvNamedWindow("cvcam", CV_WINDOW_AUTOSIZE);
	HWND hWnd = (HWND)cvGetWindowHandle(wincvcam);
	cvcamSetProperty(cameraSelected, CVCAM_PROP_WINDOW, &hWnd);
	cvMoveWindow(wincvcam,112,0);
	cvResizeWindow(wincvcam,320,240);
	cvcamSetProperty(cameraSelected, CVCAM_PROP_CALLBACK, grabframe);
	cvcamInit();
	cvcamStart();
	return true;
}