Beispiel #1
2
int main(int argc, char* argv[])
{
  #pragma omp master
	{
	#ifdef _OPENMP
		int nthreads = omp_get_num_threads();
		std::cout << "Using OpenMP - There are " << nthreads << " threads" << std::endl;
	#else
		std::cout << "Not using OpenMP" << '\n';
	#endif
	}


  // -------------------------------------------------------------------------------------
  // Create "tiy_log/" subdirectory (win) or "/home/<username>/tiy_log/" (linux)
  // -------------------------------------------------------------------------------------
  std::string log_file_directory = "tiy_log/";
#ifdef WIN32
#else
  log_file_directory = std::string(getpwuid(getuid())->pw_dir) + "/" + log_file_directory;
#endif
  boost::filesystem::path dir_path(log_file_directory);
  if (!boost::filesystem::is_directory(dir_path) && !boost::filesystem::create_directory(dir_path))
  {
	  std::cerr << "Could not create log subdirectory." << std::endl;
	  std::cerr << "PRESS A KEY TO EXIT"; cv::destroyAllWindows(); cv::waitKey(1); std::cin.get();
	  return 0;
  }


  // -------------------------------------------------------------------------------------
  // Input ARG
  // -------------------------------------------------------------------------------------
  char *arg_camera_config_file = (char *)"config_camera.xml";
  char *arg_object_config_file = (char *)"config_object.xml";
  char *arg_run_parameter_config_file = (char *)"config_run_parameters.xml";

  if (argc == 1)
  {
    std::cerr << "USING DEFAULT CONFIG FILES:  config_camera.xml config_object.xml config_run_parameters.xml" << std::endl;
  }
  else if (argc!=1 && argc != 4)
  {
	std::cerr << "Usage: 	server <camera_config_file> <object_config_file> <run_parameters_config_file>" << std::endl;
	std::cerr << "default:  server config_camera.xml config_object.xml config_run_parameters.xml" << std::endl;
	std::cerr << "PRESS A KEY TO EXIT"; cv::destroyAllWindows(); cv::waitKey(1); std::cin.get();
	return 0;
  }
  else
  {
	arg_camera_config_file = argv[0];
	arg_object_config_file = argv[1];
	arg_run_parameter_config_file = argv[2];
  }


  // -------------------------------------------------------------------------------------
  // Get Run Parameters from XML Config File
  // -------------------------------------------------------------------------------------
	cv::FileStorage input_file_storage;
	if (!input_file_storage.open(arg_run_parameter_config_file, cv::FileStorage::READ))
	{
		std::cerr << "could NOT open " << arg_run_parameter_config_file << std::endl;
		std::cerr << "PRESS A KEY TO EXIT"; cv::destroyAllWindows(); cv::waitKey(1); std::cin.get();
		return 0;
	}

	int do_use_kalman_filter=-1, do_interactive_mode=-1, multicast_port=-1, do_show_graphics=-1,
		do_output_debug=-1, do_output_2D=-1, do_output_3D=-1, do_output_object=-1, do_output_virt_point=-1,
		do_log_2D=-1, do_log_3D=-1, do_log_object=-1, do_log_virt_point=-1, do_log_video=-1, do_log_frame=-1,
		do_send_object_pose=-1, do_send_virt_point_pose=-1;

	do_use_kalman_filter = (int)input_file_storage["do_use_kalman_filter"];
	do_interactive_mode = (int)input_file_storage["do_interactive_mode"];
	multicast_port = (int)input_file_storage["multicast_port"];
	do_show_graphics = (int)input_file_storage["do_show_graphics"];
	do_output_debug = (int)input_file_storage["do_output_debug"];
	do_output_2D = (int)input_file_storage["do_output_2D"];
	do_output_3D = (int)input_file_storage["do_output_3D"];
	do_output_object = (int)input_file_storage["do_output_object"];
	do_output_virt_point = (int)input_file_storage["do_output_virt_point"];
	do_log_2D = (int)input_file_storage["do_log_2D"];
	do_log_3D = (int)input_file_storage["do_log_3D"];
	do_log_object = (int)input_file_storage["do_log_object"];
	do_log_virt_point = (int)input_file_storage["do_log_virt_point"];
	do_log_video = (int)input_file_storage["do_log_video"];
	do_log_frame = (int)input_file_storage["do_log_frame"];
	do_send_object_pose = (int)input_file_storage["do_send_object_pose"];
	do_send_virt_point_pose = (int)input_file_storage["do_send_virt_point_pose"];

	std::string multicast_adress = (std::string)input_file_storage["multicast_adress"];
	std::string input_device_src = (std::string)input_file_storage["input_device_src"];	// (m: Mouse, k: Keyboard)
	std::string mouse_device_id = (std::string)input_file_storage["mouse_device_id"];
	std::string keyboard_device_id = (std::string)input_file_storage["keyboard_device_id"];
	std::string input_src = (std::string)input_file_storage["input_src"];	// (b: Basler Camera, o: OpenCV Camera, v: Video files, t: 2D point files)
	std::string video_left = (std::string)input_file_storage["video_left"];
	std::string video_right = (std::string)input_file_storage["video_right"];
	std::string points_2D_left = (std::string)input_file_storage["points_2D_left"];
	std::string points_2D_right = (std::string)input_file_storage["points_2D_right"];
	std::string log_points_2D_left = log_file_directory + (std::string)input_file_storage["log_points_2D_left"];
	std::string log_points_2D_right = log_file_directory + (std::string)input_file_storage["log_points_2D_right"];
	std::string log_points_3D = log_file_directory + (std::string)input_file_storage["log_points_3D"];
	std::string log_object_pose = log_file_directory + (std::string)input_file_storage["log_object_pose"];
	std::string log_virt_point_pose = log_file_directory + (std::string)input_file_storage["log_virt_point_pose"];
	std::string log_video_left = log_file_directory + (std::string)input_file_storage["log_video_left"];
	std::string log_video_right = log_file_directory + (std::string)input_file_storage["log_video_right"];
	std::string log_frame_left_prefix = log_file_directory + (std::string)input_file_storage["log_frame_left_prefix"];
	std::string log_frame_right_prefix = log_file_directory + (std::string)input_file_storage["log_frame_right_prefix"];

	input_file_storage.release();

	if (do_use_kalman_filter==-1 || do_interactive_mode==-1 || multicast_port==-1 || do_show_graphics==-1 ||
		do_output_debug==-1 || do_output_2D==-1 || do_output_3D==-1 || do_output_object==-1 || do_output_virt_point==-1 ||
		do_log_2D==-1 || do_log_3D==-1 || do_log_object==-1 || do_log_virt_point==-1 || do_log_video==-1 || do_log_frame==-1 || 
		do_send_object_pose==-1 || do_send_virt_point_pose==-1 ||
		multicast_adress.empty() || input_device_src.empty() || mouse_device_id.empty() || 
		keyboard_device_id.empty() || input_src.empty() || video_left.empty() || video_right.empty() || 
		points_2D_left.empty() || points_2D_right.empty() ||
		log_points_2D_left.empty() || log_points_2D_right.empty() || log_points_3D.empty() ||
		log_object_pose.empty() || log_virt_point_pose.empty() || 
		log_video_left.empty() || log_video_right.empty() ||
		log_frame_left_prefix.empty() || log_frame_right_prefix.empty())
	{
		std::cerr << "Read all run parameters from " << arg_run_parameter_config_file << " failed" << std::endl;
		std::cerr << "PRESS A KEY TO EXIT"; cv::destroyAllWindows(); cv::waitKey(1); std::cin.get();
		return 0;
	}

	if (do_log_video && (input_src == "v"))
	{
		std::cerr << "Cannot read video files and record to files at the same time." << std::endl;
		std::cerr << "PRESS A KEY TO EXIT"; cv::destroyAllWindows(); cv::waitKey(1); std::cin.get();
		return 0;
	}

	bool do_debugging = (do_output_debug != 0);


  // -------------------------------------------------------------------------------------
  // Initialize Motion Capturing (segmentation/marker extraction, marker template fitting)
  // -------------------------------------------------------------------------------------
  tiy::MarkerTracking m_track(do_debugging);

  if (!m_track.readConfigFiles(arg_camera_config_file, arg_object_config_file))
  {
	  std::cerr << "PRESS A KEY TO EXIT"; cv::destroyAllWindows(); cv::waitKey(1); std::cin.get();
	  return 0;
  }


  // -------------------------------------------------------------------------------------
  // Input device
  // -------------------------------------------------------------------------------------
  boost::scoped_ptr<tiy::MouseDevice> mouse_device;
  boost::scoped_ptr<tiy::KeyboardDevice> keyboard_device;
#ifdef WIN32
  mouse_device.reset(new tiy::WindowsMouse(do_debugging));
  keyboard_device.reset(new tiy::WindowsKeyboard(do_debugging));
#else
  mouse_device.reset(new tiy::LinuxMouse(do_debugging));
  keyboard_device.reset(new tiy::LinuxKeyboard(do_debugging));
#endif

  int read_intervall_ms = 1;

  if ((input_device_src == "m") && (!mouse_device->openAndReadMouse(mouse_device_id, read_intervall_ms)))
  {
	std::cout << "MouseDevice::openAndReadMouse() failed" << std::endl;
	std::cerr << "PRESS A KEY TO EXIT"; cv::destroyAllWindows(); cv::waitKey(1); std::cin.get();
	return 0;
  }

  if (!keyboard_device->openAndReadKeyboard(keyboard_device_id, read_intervall_ms))
  {
	std::cout << "KeyboardDevice::openAndReadKeyboard() failed" << std::endl;
	std::cerr << "PRESS A KEY TO EXIT"; cv::destroyAllWindows(); cv::waitKey(1); std::cin.get();
	return 0;
  }


  // -------------------------------------------------------------------------------------
  // Stereo camera
  // -------------------------------------------------------------------------------------
  boost::scoped_ptr<tiy::StereoCamera> stereo_camera;

  std::string camera_id_left = m_track.left_camera_id;
  std::string camera_id_right = m_track.right_camera_id;
  if (input_src == "b")
  {
#ifdef USE_aravis
	  	  stereo_camera.reset(new tiy::BaslerGigEStereoCamera(do_debugging, camera_id_left, camera_id_right,
								m_track.frame_width, m_track.frame_height, m_track.camera_exposure, m_track.camera_gain, m_track.frame_rate));
#else
  	  	  std::cerr << "BaslerGigEStereoCamera not available, as aravis NOT found/used." << std::endl;
		  std::cerr << "PRESS A KEY TO EXIT"; cv::destroyAllWindows(); cv::waitKey(1); std::cin.get();
  	  	  return 0;
#endif
  }
  else if (input_src == "o")
  		  stereo_camera.reset(new tiy::OpenCVStereoCamera(do_debugging, camera_id_left, camera_id_right,
								m_track.frame_width, m_track.frame_height, m_track.camera_exposure, m_track.camera_gain, m_track.frame_rate));
  else if (input_src == "v")
  		  stereo_camera.reset(new tiy::OpenCVStereoCamera(do_debugging, camera_id_left, camera_id_right,
								m_track.frame_width, m_track.frame_height, m_track.camera_exposure, m_track.camera_gain, m_track.frame_rate, video_left, video_right));
  else
  {
	  std::cerr << "No input source \"input_src\" specified in the configuration file \"" << arg_run_parameter_config_file << "\"" << std::endl;
	  std::cerr << "PRESS A KEY TO EXIT"; cv::destroyAllWindows(); cv::waitKey(1); std::cin.get();
	  return 0;
  }


  if (stereo_camera->openCam())
	  stereo_camera->startCam();
  else
  {
	  std::cerr << "MarkerTracking::connectStereoCamera() failed" << std::endl;
	  std::cerr << "PRESS A KEY TO EXIT"; cv::destroyAllWindows(); cv::waitKey(1); std::cin.get();
	  return 0;
  }

  cv::Mat image_left = stereo_camera->createImage();
  cv::Mat image_right = stereo_camera->createImage();
  long long int frame_timestamp;


  // -------------------------------------------------------------------------------------
  // BOOST ASIO MULTICAST SERVER
  // -------------------------------------------------------------------------------------
  boost::asio::io_service server_io_service;
  tiy::MulticastServer multicast_server(server_io_service, boost::asio::ip::address::from_string(multicast_adress), multicast_port, do_debugging);

  boost::system::error_code error_c;
  boost::thread server_io_service_thread(boost::bind(&boost::asio::io_service::run, &server_io_service, error_c));


  // -------------------------------------------------------------------------------------
  // Logging
  // -------------------------------------------------------------------------------------
  std::ofstream log_2D_left, log_2D_right, log_3D, log_object, log_virt_point;
  if (do_log_2D)
  {
	  log_2D_left.open(log_points_2D_left.c_str());
	  log_2D_right.open(log_points_2D_right.c_str());
  }
  if (do_log_3D)
	  log_3D.open(log_points_3D.c_str());
  if (do_log_object)
	  log_object.open(log_object_pose.c_str());
  if (do_log_virt_point)
	  log_virt_point.open(log_virt_point_pose.c_str());
  if (do_log_video)
	  stereo_camera->startRecording(log_video_left, log_video_right);


  // -------------------------------------------------------------------------------------
  // MAIN LOOP
  // -------------------------------------------------------------------------------------
  int capture_counter = 1;
  bool is_base_temp = false;
  int test_points_counter = 0;

  // time measurement
  boost::posix_time::ptime start_time, end_time;
  start_time = boost::posix_time::microsec_clock::universal_time();

  for(int i = 0; true; i++)
    {
	  // -------------------------------------------------------------------------------------
	  // Grab stereo frame
	  // -------------------------------------------------------------------------------------
	  if(!stereo_camera->grabFrame(image_left, image_right, frame_timestamp))
      {
		  if (input_src == "v")
    	  {
			  std::cout << "Video file finished." << std::endl;
		  	  std::cerr << "PRESS A KEY TO EXIT"; cv::destroyAllWindows(); cv::waitKey(1); std::cin.get();
    		  return 0;
    	  }

    	  std::cerr << "Grabbing failed" << std::endl;
    	  std::cerr << "PRESS A KEY TO EXIT"; cv::destroyAllWindows(); cv::waitKey(1); std::cin.get();
		  return 0;
      }

	  if (do_log_video)
		  stereo_camera->recordFrame();


      // -------------------------------------------------------------------------------------
      // Extract (or read from file) 2D points
      // -------------------------------------------------------------------------------------
      cv::vector<cv::Point2f> points_2D_left, points_2D_right;
#pragma omp parallel sections
      {
#pragma omp section
        {
        	if (input_src == "t")
        		m_track.get2DPointsFromFile("testpoints_left", &points_2D_left, test_points_counter);
        	else
        		m_track.get2DPointsFromImage(image_left, &points_2D_left);
        }
#pragma omp section
        {
        	if (input_src == "t")
    	    	m_track.get2DPointsFromFile("testpoints_right", &points_2D_right, test_points_counter);
        	else
        		m_track.get2DPointsFromImage(image_right, &points_2D_right);
        }
      }
      test_points_counter++;


      // -------------------------------------------------------------------------------------
      // Compute 3D points from 2D points
      // -------------------------------------------------------------------------------------
      cv::Mat points_3D = m_track.get3DPointsFrom2DPoints(points_2D_left, points_2D_right);


      // -------------------------------------------------------------------------------------
      // Search for marker objects (templates)
      // -------------------------------------------------------------------------------------
      std::vector<cv::Mat>RT_template_leftcam;
      std::vector<float>avg_dev;

      for(int t = 0; t < m_track.num_templates;t++)
      {
    	  RT_template_leftcam.push_back(cv::Mat::zeros(4,4,CV_32F));
    	  avg_dev.push_back(0);
      }
#pragma omp parallel for
      for(int r = 0; r < m_track.num_templates; r++)	  
    	  m_track.fit3DPointsToObjectTemplate(points_3D, r, RT_template_leftcam[r], &avg_dev[r]);

		  
      // -------------------------------------------------------------------------------------
      // Update mouse and keyboard status
      // -------------------------------------------------------------------------------------
      bool was_SPACE_pressed=false, was_ESC_pressed=false;

      keyboard_device->getStatusSinceLastReset(was_SPACE_pressed, was_ESC_pressed);
      if (was_ESC_pressed)
      {
    	  std::cerr << "PRESS A KEY TO EXIT"; cv::destroyAllWindows(); cv::waitKey(1); std::cin.get();
    	  return 0;
      }
      keyboard_device->resetStatus();

	  bool was_left_button_pressed=false, was_left_button_released=false, is_left_button_pressed=false,
			  was_right_button_pressed=false, was_right_button_released=false, is_right_button_pressed=false,
				  has_mouse_wheel_changed=false;
	  static int mouse_wheel_position=0;

	  if (input_device_src == "m")
	  {
		  mouse_device->getStatusSinceLastReset(was_left_button_pressed, was_left_button_released, is_left_button_pressed,
										  was_right_button_pressed, was_right_button_released, is_right_button_pressed,
										  has_mouse_wheel_changed, mouse_wheel_position);
		  mouse_device->resetStatus();
	  }
	  

      // -------------------------------------------------------------------------------------
      // OUTPUT (Send/Display/Log) the selected data
      // -------------------------------------------------------------------------------------
	  if (!do_interactive_mode || ((input_device_src == "m") && was_left_button_pressed) || ((input_device_src == "k") && was_SPACE_pressed))
        {
	      // -------------------------------------------------------------------------------------
	      // Send (publish the object/virtual point pose over multicast)
	      // -------------------------------------------------------------------------------------
	      if(do_send_object_pose)
	        {
	    	  std::string send_string;
			  for(int r = 0; r < m_track.num_templates; r++)
			  {
				  cv::Mat rodrigues_orientation = cv::Mat::zeros(3, 1, CV_32F);
			      if (countNonZero(RT_template_leftcam[r]))
					 Rodrigues(RT_template_leftcam[r](cv::Range(0,3),cv::Range(0,3)), rodrigues_orientation);

			      int last_col = RT_template_leftcam[r].size.p[0] - 1;

				  std::stringstream frame_timestamp_ss; // as boost::format not compatible with long long int
				  frame_timestamp_ss << frame_timestamp;
				  std::string send_buffer = (boost::format("%s\t%d\t%.4f\t%.4f\t%.4f\t%.4f\t%.4f\t%.4f\t") % frame_timestamp_ss.str() % r 
											% RT_template_leftcam[r].at<float>(0,last_col) % RT_template_leftcam[r].at<float>(1,last_col) % RT_template_leftcam[r].at<float>(2,last_col)
											% rodrigues_orientation.at<float>(0,0) % rodrigues_orientation.at<float>(1,0) % rodrigues_orientation.at<float>(2,0) ).str();

				  send_string += send_buffer;
			  }

			  multicast_server.sendString(send_string);

			  if(do_debugging)
			  	std::cout << "-------------" << std::endl << "SENDING :" << send_string << std::endl << "----------------" << std::endl;
	        }			
		  if(do_send_virt_point_pose)
	        {
	    	  std::string send_string;
			  for(int r = 0; r < m_track.num_templates; r++)
			  {			  
				  cv::Mat RT_virt_point_to_leftcam = cv::Mat::zeros(4, 4, CV_32F);				
				  cv::Mat rodrigues_orientation = cv::Mat::zeros(3, 1, CV_32F);
				  if (countNonZero(RT_template_leftcam[r]) && countNonZero(m_track.RT_virt_point_to_template[r] - cv::Mat::eye(4, 4, CV_32F)))
				  { 
					RT_virt_point_to_leftcam = RT_template_leftcam[r] * m_track.RT_virt_point_to_template[r];
					Rodrigues(RT_virt_point_to_leftcam(cv::Range(0,3),cv::Range(0,3)), rodrigues_orientation);
				  }
			  
			      int last_col = RT_virt_point_to_leftcam.size.p[0] - 1;

				  std::stringstream frame_timestamp_ss; // as boost::format not compatible with long long int
				  frame_timestamp_ss << frame_timestamp;
				  std::string send_buffer = (boost::format("%s\t%.4f\t%.4f\t%.4f\t%.4f\t%.4f\t%.4f\t") % frame_timestamp_ss.str()
											% RT_virt_point_to_leftcam.at<float>(0,last_col) % RT_virt_point_to_leftcam.at<float>(1,last_col) % RT_virt_point_to_leftcam.at<float>(2,last_col)
											% rodrigues_orientation.at<float>(0,0) % rodrigues_orientation.at<float>(1,0) % rodrigues_orientation.at<float>(2,0) ).str();

				  send_string += send_buffer;
			  }
			  multicast_server.sendString(send_string);

			  if(do_debugging)
			  	std::cout << "-------------" << std::endl << "SENDING :" << send_string << std::endl << "----------------" << std::endl;
	        }
			
		  // -------------------------------------------------------------------------------------
		  // Display
		  // -------------------------------------------------------------------------------------
		  if (do_debugging)
		  {
			if (was_left_button_pressed)
				std::cout << "LEFT" << std::endl;
			if (was_left_button_released)
				std::cout << "LEFT RELEASED" << std::endl;
			if (was_right_button_pressed)
				std::cout << "RIGHT" << std::endl;
			if (was_right_button_released)
				std::cout << "RIGHT RELEASED" << std::endl;
			if (has_mouse_wheel_changed)
				std::cout << "WHEEL: " << mouse_wheel_position << std::endl;
			if (is_left_button_pressed)
				std::cout << "LEFT STILL" << std::endl;
			if (is_right_button_pressed)
				std::cout << "RIGHT STILL" << std::endl;

			if (was_SPACE_pressed)
				std::cout << "SPACE" << std::endl;
			if (was_ESC_pressed)
				std::cout << "ESC" << std::endl;
		  }
          if (do_output_2D)
            {
        	  std::cout << frame_timestamp;
        	  for(unsigned int p = 0; p < points_2D_left.size(); p++)
        		  std::cout << "\t" << points_2D_left[p].x << "\t" << points_2D_left[p].y;
        	  std::cout << std::endl;

        	  std::cout << frame_timestamp;
        	  for(unsigned int p = 0; p < points_2D_right.size(); p++)
        		  std::cout  << "\t" << points_2D_right[p].x << "\t" << points_2D_right[p].y;
        	  std::cout << std::endl;
            }
          if (do_output_3D)
            {
        	  std::cout << frame_timestamp;
			  for(int p = 0; p < points_3D.cols; p++)
				  std::cout  << "\t" << points_3D.at<float>(0,p) << "\t" << points_3D.at<float>(1,p) << "\t" << points_3D.at<float>(2,p);
			  std::cout << std::endl;
            }
    	  if (do_output_object)
    	    {
			  std::cout << frame_timestamp;
			  for(int r = 0; r < m_track.num_templates; r++)
			  {
				  cv::Mat rodrigues_orientation = cv::Mat::zeros(3, 1, CV_32F);
			      if (countNonZero(RT_template_leftcam[r]))
					 Rodrigues(RT_template_leftcam[r](cv::Range(0,3),cv::Range(0,3)), rodrigues_orientation);

			      int last_col = RT_template_leftcam[r].size.p[0] - 1;
			      std::cout << "\t" << RT_template_leftcam[r].at<float>(0,last_col) << "\t" << RT_template_leftcam[r].at<float>(1,last_col) << "\t" << RT_template_leftcam[r].at<float>(2,last_col) << "\t" << rodrigues_orientation.at<float>(0,0) << "\t" << rodrigues_orientation.at<float>(1,0) << "\t" << rodrigues_orientation.at<float>(2,0);
			      //std::cout << std::endl << "avg_dev = " << avg_dev[r];
			  }
			  std::cout << std::endl;
    	    }			
		  if (do_output_virt_point)
    	    {
			  std::cout << frame_timestamp;
			  for(int r = 0; r < m_track.num_templates; r++)
			  {				
				  cv::Mat RT_virt_point_to_leftcam = cv::Mat::zeros(4, 4, CV_32F);				
				  cv::Mat rodrigues_orientation = cv::Mat::zeros(3, 1, CV_32F);
				  if (countNonZero(RT_template_leftcam[r]) && countNonZero(m_track.RT_virt_point_to_template[r] - cv::Mat::eye(4, 4, CV_32F)))
				  { 
					RT_virt_point_to_leftcam = RT_template_leftcam[r] * m_track.RT_virt_point_to_template[r];
					Rodrigues(RT_virt_point_to_leftcam(cv::Range(0,3),cv::Range(0,3)), rodrigues_orientation);
				  }
				  
			      int last_col = RT_virt_point_to_leftcam.size.p[0] - 1;
			      std::cout << "\t" << RT_virt_point_to_leftcam.at<float>(0,last_col) << "\t" << RT_virt_point_to_leftcam.at<float>(1,last_col) << "\t" << RT_virt_point_to_leftcam.at<float>(2,last_col) << "\t" << rodrigues_orientation.at<float>(0,0) << "\t" << rodrigues_orientation.at<float>(1,0) << "\t" << rodrigues_orientation.at<float>(2,0);
			  }
			  std::cout << std::endl;
    	    }
			

		  // -------------------------------------------------------------------------------------
		  // Log
		  // -------------------------------------------------------------------------------------
		  if (do_log_2D)
          {
			  log_2D_left << frame_timestamp;
			  for(unsigned int p = 0; p < points_2D_left.size(); p++)
				  log_2D_left << "\t" << points_2D_left[p].x << "\t" << points_2D_left[p].y;
			  log_2D_left << std::endl;

			  log_2D_right << frame_timestamp;
			  for(unsigned int p = 0; p < points_2D_right.size(); p++)
				  log_2D_right  << "\t" << points_2D_right[p].x << "\t" << points_2D_right[p].y;
			  log_2D_right << std::endl;
          }
		  if (do_log_3D)
          {
			  log_3D << frame_timestamp;
			  for(int p = 0; p < points_3D.cols; p++)
				  log_3D  << "\t" << points_3D.at<float>(0,p) << "\t" << points_3D.at<float>(1,p) << "\t" << points_3D.at<float>(2,p);
			  log_3D << std::endl;
          }
		  if (do_log_object)
		  {
			  log_object << frame_timestamp;
			  for(int r = 0; r < m_track.num_templates; r++)
			  {
				  cv::Mat rodrigues_orientation = cv::Mat::zeros(3, 1, CV_32F);
			      if (countNonZero(RT_template_leftcam[r]))
					 Rodrigues(RT_template_leftcam[r](cv::Range(0,3),cv::Range(0,3)), rodrigues_orientation);

			      int last_col = RT_template_leftcam[r].size.p[0] - 1;
			      log_object << "\t" << RT_template_leftcam[r].at<float>(0,last_col) << "\t" << RT_template_leftcam[r].at<float>(1,last_col) << "\t" << RT_template_leftcam[r].at<float>(2,last_col) << "\t" << rodrigues_orientation.at<float>(0,0) << "\t" << rodrigues_orientation.at<float>(1,0) << "\t" << rodrigues_orientation.at<float>(2,0);
			      //log_object << std::endl << "avg_dev = " << avg_dev[r];
			  }
			  log_object << std::endl;
		  }
		  if (do_log_virt_point)
		  {
			  log_virt_point << frame_timestamp;			  
			  for(int r = 0; r < m_track.num_templates; r++)
			  {				  					
				  cv::Mat RT_virt_point_to_leftcam = cv::Mat::zeros(4, 4, CV_32F);				
				  cv::Mat rodrigues_orientation = cv::Mat::zeros(3, 1, CV_32F);
				  if (countNonZero(RT_template_leftcam[r]) && countNonZero(m_track.RT_virt_point_to_template[r] - cv::Mat::eye(4, 4, CV_32F)))
				  { 
					RT_virt_point_to_leftcam = RT_template_leftcam[r] * m_track.RT_virt_point_to_template[r];
					Rodrigues(RT_virt_point_to_leftcam(cv::Range(0,3),cv::Range(0,3)), rodrigues_orientation);
				  }

			      int last_col = RT_virt_point_to_leftcam.size.p[0] - 1;
				  log_virt_point << "\t" << RT_virt_point_to_leftcam.at<float>(0,last_col) << "\t" << RT_virt_point_to_leftcam.at<float>(1,last_col) << "\t" << RT_virt_point_to_leftcam.at<float>(2,last_col) << "\t" << rodrigues_orientation.at<float>(0,0) << "\t" << rodrigues_orientation.at<float>(1,0) << "\t" << rodrigues_orientation.at<float>(2,0);
			  }
			  log_virt_point << std::endl;
		  }
		  if (do_log_video)
			  stereo_camera->recordFrame();
        }

	  // -------------------------------------------------------------------------------------
      // Capture stereo frame
      // -------------------------------------------------------------------------------------
	  if (do_log_frame && (((input_device_src == "m") && was_left_button_pressed) || ((input_device_src == "k") && was_SPACE_pressed)))
		{			
		  std::string save_file;

		  save_file = (boost::format("%s%03i.jpg") % log_frame_left_prefix % capture_counter).str();
		  cv::imwrite(save_file, image_left);

		  save_file = (boost::format("%s%03i.jpg") % log_frame_right_prefix % capture_counter).str();
		  cv::imwrite(save_file, image_right);

		  if (do_debugging)
			  std::cout << frame_timestamp << "Frame captured." << std::endl;

		  capture_counter++;
		}


      // -------------------------------------------------------------------------------------
      // Visualize stereo frame with detected points
      // -------------------------------------------------------------------------------------
      if(do_show_graphics && !(input_src == "t"))
        {
    	  // needed, as changing image content (costs 0.5-1.5 [ms])
    	  cv::Mat image_left_cpy, image_right_cpy;
    	  image_left.copyTo(image_left_cpy);
    	  image_right.copyTo(image_right_cpy);

          for(unsigned int p=0; p < points_2D_left.size(); p++)
              cv::circle(image_left_cpy, points_2D_left[p], 2, cv::Scalar(0), 1, CV_AA, 0);
          for(unsigned int p=0; p < points_2D_right.size(); p++)
              cv::circle(image_right_cpy, points_2D_right[p], 2, cv::Scalar(0), 1, CV_AA, 0);

          cv::Mat object_rotation(3, 1, CV_32F);
          cv::Mat object_translation(3, 1, CV_32F);
          cv::vector<cv::Point2f> object_2D;

          for(int r = 0; r < m_track.num_templates; r++)
            {
			  if (avg_dev[r] < std::numeric_limits<float>::infinity())
              {
                  Rodrigues(RT_template_leftcam[r](cv::Range(0,3),cv::Range(0,3)), object_rotation);
                  object_translation = RT_template_leftcam[r](cv::Range(0,3),cv::Range(3,4)).t();
                  cv::vector<cv::Point3f> object_points;
                  object_points.push_back(cv::Point3f(RT_template_leftcam[r].at<float>(0,3), RT_template_leftcam[r].at<float>(1,3), RT_template_leftcam[r].at<float>(2,3)));
                  projectPoints(cv::Mat(object_points), cv::Mat::zeros(3,1,CV_32F), cv::Mat::zeros(3,1,CV_32F), m_track.KK_left, m_track.kc_left, object_2D);
                  cv::circle(image_left_cpy, object_2D[0], 4, cv::Scalar(255,255,255), 1, CV_AA, 0);
                  cv::circle(image_left_cpy, object_2D[0], 3, cv::Scalar(0,0,150), 1, CV_AA, 0);
                  projectPoints(cv::Mat(object_points), m_track.om_leftcam_to_rightcam, m_track.T_leftcam_to_rightcam, m_track.KK_right, m_track.kc_right, object_2D);
                  cv::circle(image_right_cpy, object_2D[0], 4, cv::Scalar(255,255,255), 1, CV_AA, 0);
                  cv::circle(image_right_cpy, object_2D[0], 3, cv::Scalar(0,0,150), 1, CV_AA, 0);
              }
            }

		  imshow("Image Left", image_left_cpy);
		  imshow("Image Right", image_right_cpy);

	      cv::waitKey(1);
        }


      // -------------------------------------------------------------------------------------
      // END MEASURE of the computation time (of one cycle)
      // -------------------------------------------------------------------------------------
      if (do_debugging)
      {
		end_time = boost::posix_time::microsec_clock::universal_time();
		boost::posix_time::time_duration time_diff = end_time - start_time;

		std::cout << "comp_time = " << time_diff.total_microseconds() << " [us]" << std::endl;

		start_time = boost::posix_time::microsec_clock::universal_time();
      }
    } //end MAIN LOOP

	if (log_2D_left.is_open())
		log_2D_left.close();
	if (log_2D_right.is_open())
		log_2D_right.close();
	if (log_3D.is_open())
		log_3D.close();
	if (log_object.is_open())
		log_object.close();

	stereo_camera->closeCam();

  std::cerr << "PRESS A KEY TO EXIT"; cv::destroyAllWindows(); cv::waitKey(1); std::cin.get();
  return 0;
}
Beispiel #2
0
/**
 * Detect a single cell inside the region delimited by the circle cir_org and
 * compute its geometric descriptors.
 *
 * The circle arrives in scaled global coordinates; it is rescaled to image
 * coordinates, an enlarged bounding rectangle (ROI) is extracted, and the cell
 * is segmented via adaptive thresholding + dilation/erosion, masked by the
 * (dilated) circle polygon.
 *
 * Side effects: updates the member state `frame` and `rect`.
 *
 * @param img        input frame (converted with CV_RGB2GRAY internally —
 *                   assumes 3-channel input; TODO confirm channel order)
 * @param cir_org    in: circle points (scaled global coords);
 *                   out: replaced by the convex hull of the detected contour
 *                   (scaled global coords)
 * @param dispImg1   out: color ROI crop of the frame (with contours drawn by
 *                   dilErodContours)
 * @param dispImg2   out: smoothed-contour cell image, converted to RGB
 * @param area       out: white-pixel count inside the largest contour
 * @param perimeter  out: perimeter reported by dilErodContours
 * @param ctroid     out: centroid of the largest contour, global image coords
 * @param shape      out: shape measure returned by findShape
 * @param cell_alpha out: cell image with alpha channel (background removed)
 * @param smooth_contour_curve     out: smoothed contour, ROI-relative coords
 * @param smooth_contour_curve_abs out: smoothed contour, absolute coords
 * @param blebsImg   out: bleb regions (cell area minus smoothed outline)
 * @param rectangle  out: the ROI rectangle that was used
 * @param frameNum   frame index (only used by commented-out debug output)
 */
void FindContour::singleCellDetection(const Mat &img, vector<Point> &cir_org,
                                      Mat &dispImg1, Mat &dispImg2,
                                      int &area, int &perimeter,
                                      Point2f &ctroid, float &shape,
                                      Mat &cell_alpha, // only the area inside cell (without background)
                                      vector<Point> &smooth_contour_curve, // relative position (without counting rect.x and rect.y)
                                      vector<Point> &smooth_contour_curve_abs, // absolute position
                                      Mat &blebsImg,
                                      Rect &rectangle,
                                      //vector<int> &blebs,
                                      int &frameNum)
{
    frame = &img;

    vector<Point> cir; //***global coordinates of circle***
    // Undo the display scaling: member `scale` maps display coords -> image coords.
    //cout << "[";
    for(unsigned int i = 0; i < cir_org.size(); i++){
        cir.push_back(Point(cir_org[i].x / scale, cir_org[i].y / scale));
        //cout << int(cir_org[i].x / scale) << ", " << int(cir_org[i].y / scale) << "; ";
    }
    //cout << "]" << endl;

    //enlarge the bounding rect by adding a margin (e) to it
    rect = enlargeRect(boundingRect(Mat(cir)), 5, img.cols, img.rows);
    //cout << "rect_roi " << boundingRect(Mat(cir)) << "\n";
    //cout << "enlarged rect " << rect << endl;

    dispImg1 = (*frame)(rect).clone();

    Mat sub; //*** the rectangle region of ROI (Gray) ***
    cv::cvtColor(dispImg1, sub, CV_RGB2GRAY);
    int width = sub.cols;
    int height = sub.rows;

    rectangle = rect;

//    Mat canny;
//    CannyWithBlur(sub, canny);
//    imshow("canny", canny);

    // Circle polygon expressed in ROI-local coordinates.
    vector<Point> circle_ROI; //***local coordinates of circle***
    for (unsigned int i = 0; i < cir.size(); i++){
        Point p = Point(cir[i].x - rect.x, cir[i].y - rect.y);
        circle_ROI.push_back(p);
    }

    Mat adapThreshImg1 = Mat::zeros(height, width, sub.type());
    //image edge detection for the sub region (roi rect)
    adaptiveThreshold(sub, adapThreshImg1, 255.0, ADAPTIVE_THRESH_GAUSSIAN_C,
                          CV_THRESH_BINARY_INV, blockSize, constValue);
    //imshow("adapThreshImg1", adapThreshImg1);

    // dilation and erosion
    Mat dilerod;
    dilErod(adapThreshImg1, dilerod, dilSize);

    //display image 2 -- dilerod of adaptive threshold image
    GaussianBlur(dilerod, dilerod, Size(3, 3), 2, 2 );

    //mask for filtering out the cell of interest
    Mat mask_conv = Mat::zeros(height, width, CV_8UC1);
    fillConvexPoly(mask_conv, circle_ROI, Scalar(255));
    //imshow("mask_before", mask_conv);

    //dilate the mask -> region grows
    Mat mask_conv_dil;
    Mat element = getStructuringElement( MORPH_ELLIPSE,
                                         Size( 2*dilSize+1, 2*dilSize+1 ),
                                         Point(dilSize,dilSize) );
    dilate(mask_conv, mask_conv_dil, element);
    //imshow("mask_dil", mask_conv_dil);

    //bitwise AND on mask and dilerod
    bitwise_and(mask_conv_dil, dilerod, dispImg2);

    // findcontours
    vector<vector<Point> > contours;
    vector<Vec4i> hierarchy;
    unsigned int largest_contour_index;
    dilErodContours(dispImg2, contours, hierarchy, largest_contour_index, perimeter, dispImg1);

    // find the area of the cell by counting the white area inside the largest contour
    Mat cellArea = Mat::zeros(height, width, CV_8UC1);
    drawContours(cellArea, contours, largest_contour_index, Scalar(255), -1, 8, hierarchy, 0, Point() );
    //imshow("cellArea", cellArea);
    area = countNonZero(cellArea);

    //cout << "frame " << frameNum << "\n";
    //cout << contours[largest_contour_index] << endl;

    //renew circle points as the convex hull
    vector<Point> convHull;
    convexHull(contours[largest_contour_index], convHull);

    // find the centroid of the contour
    Moments mu = moments(contours[largest_contour_index]);
    ctroid = Point2f(mu.m10/mu.m00 + rect.x, mu.m01/mu.m00 + rect.y);

    // find the shape of the cell by the largest contour and centroid
    shape = findShape(ctroid, contours[largest_contour_index]);

    ////---- draw largest contour start ----
    //draw the largest contour
    Mat borderImg = Mat::zeros(height, width, CV_8UC1);
    drawContours(borderImg, contours, largest_contour_index, Scalar(255), 1, 8, hierarchy, 0, Point());
    //QString cellFileName0 = "border" + QString::number(frameNum) + ".png";
    //imwrite(cellFileName0.toStdString(), borderImg);


    Mat cell;
    bitwise_and(cellArea, sub, cell);
    //cell_alpha = createAlphaMat(cell);  // cell image with exactly the contour detected
    //vector<int> compression_params;
    //compression_params.push_back(CV_IMWRITE_PNG_COMPRESSION);
    //compression_params.push_back(9);
//    QString cellFileName1 = "cell" + QString::number(frameNum) + ".png";
//    imwrite(cellFileName1.toStdString(), cell_alpha, compression_params);
    ////---- draw largest contour end ----

    // find the number and the sizes of blebs of the cell
    // Two smoothing passes: WIN=25 (coarse, used for bleb extraction below)
    // and w=10 (finer, exported as smooth_contour_curve).
    Mat smooth;
    vector<Point> smoothCurve;
    int WIN = 25;
    smooth = curveSmooth(borderImg, WIN, contours[largest_contour_index], smoothCurve, convHull);
    //smooth = curveSmooth(borderImg, WIN, contours[largest_contour_index], smoothCurve, ctroid/*Point(ctroid.x, ctroid.y)*/);
    //drawPointVectors(dispImg1, smoothCurve, 1, Scalar(159, 120, 28));


    Mat smooth_contour;
    int w = 10;
    smooth_contour = curveSmooth(borderImg, w, contours[largest_contour_index], smooth_contour_curve, convHull);
    //smooth_contour = curveSmooth(borderImg, w, contours[largest_contour_index], smooth_contour_curve, ctroid/*Point(ctroid.x, ctroid.y)*/);
    //imshow("smooth_contour", smooth_contour);

    // Translate the ROI-relative smoothed curve into absolute image coordinates.
    for(unsigned int i = 0; i < smooth_contour_curve.size(); i++){
         Point p(smooth_contour_curve[i].x + rect.x, smooth_contour_curve[i].y + rect.y);
         smooth_contour_curve_abs.push_back(p);
    }

//    cout << "ctroid X " << ctroid.x << " Y " << ctroid.y << endl;
////    for(unsigned int i = 0; i < contours[largest_contour_index].size(); i++)
////        cout << "(" << contours[largest_contour_index][i].x + rect.x << ", " << contours[largest_contour_index][i].y + rect.y << ") ";
////    cout << endl;
//    for(unsigned int i = 0; i < smooth_contour_curve_abs.size(); i++)
//        cout << "(" << smooth_contour_curve_abs[i].x << ", " << smooth_contour_curve_abs[i].y << ") ";
//    cout << endl;

    //cout << mask_conv_dil.type() << " " << sub.type() << endl;
    Mat cell_convex;
    bitwise_and(smooth_contour, sub, cell_convex);
    cell_alpha = createAlphaMat(cell_convex);
//    imshow("cell_convex_contour", cell_alpha);
    dispImg2 = cell_convex.clone();

    //change dispImg2 from gray to rgb for displaying
    cvtColor(dispImg2, dispImg2, CV_GRAY2RGB);

    // Blebs = pixels inside the cell area but outside the coarse smoothed outline.
    bitwise_not(smooth, smooth);
    //Mat blebsImg;
    bitwise_and(smooth, cellArea, blebsImg);
//    imshow("blebs", blebsImg);
//    QString cellFileName2 = "blebs" + QString::number(frameNum) + ".png";
//    imwrite(cellFileName2.toStdString(), blebs);

    //QString cellFileName2 = "dispImg1" + QString::number(frameNum) + ".png";
    //imwrite(cellFileName2.toStdString(), dispImg1);

    // Replace the caller's circle with the convex hull, back in scaled coords.
    cir_org.clear();
    for(unsigned int i = 0; i < convHull.size(); i++)
        cir_org.push_back(Point((convHull[i].x + rect.x)*scale, (convHull[i].y + rect.y)*scale));
}
Beispiel #3
0
/**
 * Classify the bright spot in `image` as a defect (returns 1) or a water
 * stain (returns 0).
 *
 * Strategy: build the gray-level histogram, suppress the main peak to locate
 * a secondary peak, then sweep a binarization threshold across the histogram's
 * dynamic range. At each step the largest contour is analyzed; three counters
 * accumulate evidence of water-stain characteristics (nested child contours,
 * a hollow barycenter region, low convexity). If enough evidence accumulates,
 * the spot is a water stain (0); otherwise it is a defect (1).
 *
 * Relies on member state: `channels`, `size`, `ranges` (calcHist config) and
 * helpers SortByM2 / barycenter — assumed declared elsewhere in the class.
 */
int Judgement::JudgementYON(Mat &image)
{
	int success = 0;
	MatND dstHist;
	Mat histoImg = image.clone();
	calcHist(&histoImg, 1, &channels, Mat(), dstHist, 1, &size, ranges);
	Mat dstImg(256, 256, CV_8U, Scalar(0));// canvas for drawing the histogram (debug only; never shown)
	double minValue = 0;
	double maxValue = 0;
	Point maxloc;
	minMaxLoc(dstHist, &minValue, &maxValue, NULL, &maxloc);
	//cout << "	     " << n << "." << m << "	     " << maxValue << endl;
	int hpt = saturate_cast<int>(0.9 * 256);
	// Collect indices of all non-empty (after normalization) histogram bins.
	vector<int> Boundnum;
	for (int j = 0; j < 256; j++)
	{
		float binValue = dstHist.at<float>(j);
		int realValue = saturate_cast<int>(binValue * hpt / maxValue);
		if (realValue != 0)
		{
			rectangle(dstImg, Point(j, 255), Point(j, 256 - realValue), Scalar(255));
			Boundnum.push_back(j);
		}

	}
	// NOTE: the peak bin always yields realValue == hpt != 0, so Boundnum is
	// non-empty whenever the histogram has a non-zero maximum.
	int  maxdata = *max_element(Boundnum.begin(), Boundnum.end());
	int  mindata = *min_element(Boundnum.begin(), Boundnum.end());// dynamic range of the histogram

	// Zero out a band around the main peak, then search again for the
	// secondary peak. NOTE(review): dstHist is a 1-D (256x1) histogram, so the
	// rect's x/width vs y/height orientation looks suspicious — confirm that
	// the intended band along the bin axis is actually cleared.
	Rect recttemp;
	recttemp.x = maxloc.x;
	recttemp.y = maxloc.y - int((maxdata - mindata)*0.15);
	recttemp.width = 1;
	recttemp.height = int((maxdata - mindata)*0.3);
	rectangle(dstHist, recttemp, Scalar(0), -1);
	minMaxLoc(dstHist, &minValue, &maxValue, NULL, &maxloc);
	int anoThres = maxloc.y;// secondary peak location

	Scalar avgnum;
	Mat StdDevImg;
	meanStdDev(histoImg, avgnum, StdDevImg);
	// Gray-level standard deviation. NOTE(review): Stdnum is computed but
	// never used below.
	double Stdnum = StdDevImg.at<double>(Point(0, 0));

	// Sweep the threshold from just above the histogram minimum upwards in
	// (at most) StepNum steps of size Dstep.
	int ThreStep = maxdata - mindata;
	int StepNum = 30;
	int OrStep = mindata + int(ThreStep / 10);
	int Dstep = int(ThreStep / 30.0 + 0.5);
	if (Dstep == 0)
	{
		Dstep = 1;
		StepNum = ThreStep;
	}
	Mat TempImg;
	histoImg.copyTo(TempImg);
	vector<vector<Point>> contours;
	vector<Vec4i> hierarchy;
	Point pointSN, maxPoint = Point(0, 0);
	int Marknumone = 0;
	int Marknumtwo = 0;
	int Marknumthree = 0;
	for (int i = 0; i < StepNum; i++)
	{
		// SN holds (area, contour-index) pairs for the current threshold.
		vector<Point> SN;
		OrStep = OrStep + Dstep;
		threshold(histoImg, TempImg, OrStep, 255, CV_THRESH_BINARY);


		/*Mat element = getStructuringElement(MORPH_RECT,Size(2,2));
		erode(TempImg, TempImg, cv::Mat());
		dilate(TempImg, TempImg, cv::Mat());*/
		TempImg = ~TempImg;


		/*stringstream strstrone;
		strstrone << "水渍动态图" << i << ".jpg";   // debug dump: "water-stain dynamic image <i>.jpg"
		imwrite(strstrone.str(), TempImg);*/

		// BoundImg: white 1-pixel frame used later to test whether the
		// largest contour touches the image border.
		Mat BoundImg(TempImg.rows, TempImg.cols, CV_8UC1, Scalar(255));
		Rect Wrect;
		Wrect.x = 1;
		Wrect.y = 1;
		Wrect.width = BoundImg.cols - 2;
		Wrect.height = BoundImg.rows - 2;
		rectangle(BoundImg, Wrect, Scalar(0), -1);

		// Pad by one white pixel on every side so findContours sees closed
		// regions at the image edge.
		Mat PlusImg(TempImg.rows + 2, TempImg.cols + 2, CV_8UC1, Scalar(255));
		Mat PlusROI = PlusImg(Rect(1, 1, TempImg.cols, TempImg.rows));
		TempImg.copyTo(PlusROI);
		Mat ContoursImg = PlusImg.clone();

		findContours(ContoursImg, contours, hierarchy, RETR_TREE, CV_CHAIN_APPROX_SIMPLE);
		for (size_t j = 0; j < contours.size(); j++)
		{
			double area = cv::contourArea(contours[j]);
			pointSN.x = int(area);
			pointSN.y = j;
			SN.push_back(pointSN);
		}

		if (contours.size() != 0)
		{
			// Pick the largest contour by area.
			sort(SN.begin(), SN.end(), SortByM2);
			maxPoint = SN.back();
			// Use a finer step while near the secondary peak.
			if (OrStep > anoThres - 5 && OrStep<anoThres + 20)
				Dstep = 1;
			else
			{
				Dstep = int(ThreStep / 30.0 + 0.5);
			}
			if (Dstep == 0)
				Dstep = 1;
			int k = maxPoint.y;


			Mat MarkImg(TempImg.rows, TempImg.cols, CV_8UC1, Scalar(0));
			drawContours(MarkImg, contours, k, Scalar(255), -1);
			bitwise_and(BoundImg, MarkImg, MarkImg);
			int Mbound = 0;// check whether the contour reaches the image border
			Mbound = countNonZero(MarkImg);
			if (Mbound>0.5*(histoImg.cols))
				break;
			if (contours[k].size() <= 4)
				continue;
			int son = hierarchy[k][2];
			Point gravitycore = barycenter(contours[k]);// barycenter (center of mass) of the contour

			Rect maxcontours = boundingRect(contours[k]);
			int wValue = maxcontours.width / 12;
			gravitycore = gravitycore + Point(wValue - 1, wValue - 1);

			// Pad the thresholded image so a (2*wValue)x(2*wValue) window
			// around the barycenter always fits.
			Mat gravityImg(TempImg.rows + 2 * wValue, TempImg.cols + 2 * wValue, CV_8UC1, Scalar(0));
			Mat gravityImgROI = gravityImg(Rect(wValue, wValue, TempImg.cols, TempImg.rows));
			TempImg.copyTo(gravityImgROI);


			Rect gravityrect = Rect(gravitycore - Point(1, 1), gravitycore + Point(2 * wValue, 2 * wValue) - Point(2, 2));// (2*wValue)x(2*wValue) window around the barycenter
			if (gravityrect.x < 0 || gravityrect.y < 0)
				continue;

			int avnum = countNonZero(gravityImg(Rect(gravityrect)));
			// Convexity measure: contour area / convex-hull area (<= 1).
			vector<Point> hull;
			convexHull(contours[k], hull, false);
			double promark = (contourArea(contours[k])) / (contourArea(hull));

			if (son >= 0)// contour has a child, i.e. it is a parent contour
			{
				int sonarea = 0;
				for (size_t j = 0; j < contours.size(); j++)
				{
					if (hierarchy[j][3] == k&&contourArea(contours[j])>4.0)
						sonarea = sonarea + contourArea(contours[j]);
				}
				if (50 * sonarea>maxPoint.x)// ignore occasional tiny hollow spots
					Marknumone++;
			}
			if (avnum < double(0.5 * gravityrect.width*gravityrect.width))// fewer than half of the barycenter-region pixels are white
				Marknumtwo++;
			if (promark < 0.6)
				Marknumthree++;
		}

	}

	if (Marknumone > 2 || Marknumtwo >= 2 || Marknumthree > 3)// a defect may also occasionally contain nested contours, hence the thresholds
	{
		/*cout << "该点是水渍2" << endl;*/   // classified as water stain
	}
	else
	{
		/*cout << "该点是缺陷2" << endl;*/   // classified as defect
		success++;
	}
	return success;
}
Beispiel #4
0
    /**
     * Recover the relative camera rotation and translation from an essential
     * matrix and corresponding image points, using the cheirality check over
     * the four candidate (R, t) decompositions of E.
     *
     * @param E             essential matrix (3x3)
     * @param _points1      points in the first image
     * @param _points2      corresponding points in the second image
     * @param _cameraMatrix 3x3 single-channel intrinsic camera matrix
     * @param _R            output 3x3 rotation
     * @param _t            output 3x1 translation (unit norm, sign resolved)
     * @param _mask         optional in/out 8U inlier mask: on input, nonzero
     *                      entries mark points to consider; on output, marks
     *                      points that pass the cheirality test
     * @return number of points passing the check for the chosen solution
     */
    int recoverPose( InputArray E, InputArray _points1, InputArray _points2, InputArray _cameraMatrix,
                         OutputArray _R, OutputArray _t, InputOutputArray _mask)
    {

        Mat points1, points2, cameraMatrix;
        _points1.getMat().convertTo(points1, CV_64F);
        _points2.getMat().convertTo(points2, CV_64F);
        _cameraMatrix.getMat().convertTo(cameraMatrix, CV_64F);

        int npoints = points1.checkVector(2);
        CV_Assert( npoints >= 0 && points2.checkVector(2) == npoints &&
                                  points1.type() == points2.type());

        CV_Assert(cameraMatrix.rows == 3 && cameraMatrix.cols == 3 && cameraMatrix.channels() == 1);

        if (points1.channels() > 1)
        {
            points1 = points1.reshape(1, npoints);
            points2 = points2.reshape(1, npoints);
        }

        // Normalize image points with the camera intrinsics.
        double fx = cameraMatrix.at<double>(0,0);
        double fy = cameraMatrix.at<double>(1,1);
        double cx = cameraMatrix.at<double>(0,2);
        double cy = cameraMatrix.at<double>(1,2);

        points1.col(0) = (points1.col(0) - cx) / fx;
        points2.col(0) = (points2.col(0) - cx) / fx;
        points1.col(1) = (points1.col(1) - cy) / fy;
        points2.col(1) = (points2.col(1) - cy) / fy;

        points1 = points1.t();
        points2 = points2.t();

        // Build the four candidate projection matrices {R1,R2} x {t,-t},
        // with P0 = [I|0] as the reference camera.
        Mat R1, R2, t;
        decomposeEssentialMat(E, R1, R2, t);
        Mat P0 = Mat::eye(3, 4, R1.type());
        Mat P1(3, 4, R1.type()), P2(3, 4, R1.type()), P3(3, 4, R1.type()), P4(3, 4, R1.type());
        P1(Range::all(), Range(0, 3)) = R1 * 1.0; P1.col(3) = t * 1.0;
        P2(Range::all(), Range(0, 3)) = R2 * 1.0; P2.col(3) = t * 1.0;
        P3(Range::all(), Range(0, 3)) = R1 * 1.0; P3.col(3) = -t * 1.0;
        P4(Range::all(), Range(0, 3)) = R2 * 1.0; P4.col(3) = -t * 1.0;

        // Do the cheirality check.
        // Notice here a threshold dist is used to filter
        // out far away points (i.e. infinite points) since
        // their depth may vary between positive and negative.
        double dist = 50.0;

        // Triangulate all correspondences against candidate camera P and
        // return a column mask (one row per point) of points lying in front
        // of both cameras at a finite (< dist) depth.
        auto cheiralityMask = [&](const Mat &P) {
            Mat Q;
            triangulatePoints(P0, P, points1, points2, Q);
            Mat mask = Q.row(2).mul(Q.row(3)) > 0;  // positive depth before dehomogenization
            Q.row(0) /= Q.row(3);
            Q.row(1) /= Q.row(3);
            Q.row(2) /= Q.row(3);
            Q.row(3) /= Q.row(3);
            mask = (Q.row(2) < dist) & mask;        // finite depth in the reference camera
            Q = P * Q;
            mask = (Q.row(2) > 0) & mask;           // in front of the candidate camera
            mask = (Q.row(2) < dist) & mask;        // finite depth in the candidate camera
            return Mat(mask.t());
        };

        Mat mask1 = cheiralityMask(P1);
        Mat mask2 = cheiralityMask(P2);
        Mat mask3 = cheiralityMask(P3);
        Mat mask4 = cheiralityMask(P4);

        // If _mask is given, then use it to filter outliers.
        if (!_mask.empty())
        {
            Mat mask = _mask.getMat();
            CV_Assert(mask.size() == mask1.size());
            bitwise_and(mask, mask1, mask1);
            bitwise_and(mask, mask2, mask2);
            bitwise_and(mask, mask3, mask3);
            bitwise_and(mask, mask4, mask4);
        }
        if (_mask.empty() && _mask.needed())
        {
            _mask.create(mask1.size(), CV_8U);
        }

        CV_Assert(_R.needed() && _t.needed());
        _R.create(3, 3, R1.type());
        _t.create(3, 1, t.type());

        // Pick the decomposition with the most points passing the check.
        int good1 = countNonZero(mask1);
        int good2 = countNonZero(mask2);
        int good3 = countNonZero(mask3);
        int good4 = countNonZero(mask4);

        if (good1 >= good2 && good1 >= good3 && good1 >= good4)
        {
            R1.copyTo(_R);
            t.copyTo(_t);
            if (_mask.needed()) mask1.copyTo(_mask);
            return good1;
        }
        else if (good2 >= good1 && good2 >= good3 && good2 >= good4)
        {
            R2.copyTo(_R);
            t.copyTo(_t);
            if (_mask.needed()) mask2.copyTo(_mask);
            return good2;
        }
        else if (good3 >= good1 && good3 >= good2 && good3 >= good4)
        {
            t = -t;
            R1.copyTo(_R);
            t.copyTo(_t);
            if (_mask.needed()) mask3.copyTo(_mask);
            return good3;
        }
        else
        {
            t = -t;
            R2.copyTo(_R);
            t.copyTo(_t);
            if (_mask.needed()) mask4.copyTo(_mask);
            return good4;
        }
    }
Beispiel #5
0
/**
 * Cell detection: extract the ROI around the tracked circle, refine the ROI
 * with optical-flow / homography information, then segment the cell by
 * adaptive thresholding (edge detection) and derive its descriptors.
 *
 * Side effects: updates member state `frame` and `rect`, and opens an imshow
 * debug window ("frameGray").
 *
 * @param img        input frame (converted with CV_RGB2GRAY internally)
 * @param cir_org    in: circle points (scaled global coords);
 *                   out: convex hull of the detected contour (scaled coords)
 * @param dispImg1   out: color ROI crop with contours drawn
 * @param dispImg2   out: segmented cell mask, converted to RGB
 * @param points1    optical-flow point positions in the previous frame
 * @param points2    corresponding positions in the current frame
 * @param area       out: white-pixel count inside the largest contour
 * @param perimeter  out: perimeter reported by dilErodContours
 * @param ctroid     out: contour centroid in global image coords
 * @param shape      out: shape measure returned by findShape
 * @param frameNum   frame index (only used by commented-out debug output)
 */
void FindContour::cellDetection(const Mat &img, vector<Point> &cir_org,
                                Mat &dispImg1, Mat &dispImg2,
                                vector<Point2f> &points1, vector<Point2f> &points2,
                                int &area,
                                int &perimeter,
                                Point2f &ctroid,
                                float &shape,
//                                vector<int> &blebs,
                                int &frameNum){
    frame = &img;
    //rect = boundingRect(Mat(cir));


    Mat frameGray;
    cv::cvtColor(*frame, frameGray, CV_RGB2GRAY);
/*
    QString cellFileName0 = "frame" + QString::number(frameNum) + ".png";
    imwrite(cellFileName0.toStdString(), frameGray);*/

    // Undo the display scaling: member `scale` maps display coords -> image coords.
    vector<Point> cir; //***global coordinates of circle***
    for(unsigned int i = 0; i < cir_org.size(); i++){
        cir.push_back(Point(cir_org[i].x / scale, cir_org[i].y / scale));
    }
    //cout << "original circle: " << cir_org << "\n" << " scaled circle: " << cir << endl;

    //enlarge the bounding rect by adding a margin (e) to it
    rect = enlargeRect(boundingRect(Mat(cir)), 5, img.cols, img.rows);

    //global circle mask
    Mat mask_cir_org = Mat::zeros(frame->size(), CV_8UC1);
    fillConvexPoly(mask_cir_org, cir, Scalar(255));

    // flow points
    // Keep only flow points inside the circle; accumulate the (unnormalized)
    // sum of their displacement vectors for points that moved notably.
    vector<unsigned int> cell_pts_global;
    vector<Point2f> longOptflow_pt1, longOptflow_pt2;
    Point2f avrg_vec = Point2f(0,0);
    for(unsigned int i = 0; i < points1.size(); i++){
        Point p1 = Point(points1[i].x, points1[i].y);
        Point p2 = Point(points2[i].x, points2[i].y);
        if(mask_cir_org.at<uchar>(p1.y,p1.x) == 255 ){
            cell_pts_global.push_back(i);
            if(dist_square(p1, p2) > 2.0){
                longOptflow_pt1.push_back(Point2f(p1.x, p1.y));
                longOptflow_pt2.push_back(Point2f(p2.x, p2.y));
                avrg_vec.x += (p2.x-p1.x);
                avrg_vec.y += (p2.y-p1.y);
            }
        }
    }

//    if(longOptflow_pt1.size()!= 0){
//        avrg_vec.x = avrg_vec.x / longOptflow_pt1.size();
//        avrg_vec.y = avrg_vec.y / longOptflow_pt1.size();
//    }
    // NOTE(review): avrg_vec is a SUM of displacements (the averaging code is
    // commented out), so trans_rect may be translated by more than the mean
    // flow — confirm this is intended.
    Rect trans_rect = translateRect(rect, avrg_vec);


    // ***
    // if (the homography is a good one) use the homography to update the rectangle
    // otherwise use the same rectangle
    // ***
    if (longOptflow_pt1.size() >= 4){
        Mat H = findHomography(Mat(longOptflow_pt1), Mat(longOptflow_pt2), CV_RANSAC, 2);
        //cout << "H: " << H << endl;

        if(determinant(H) >= 0){
            vector<Point> rect_corners;
            rect_corners.push_back(Point(rect.x, rect.y));
            rect_corners.push_back(Point(rect.x+rect.width, rect.y));
            rect_corners.push_back(Point(rect.x, rect.y+rect.height));
            rect_corners.push_back(Point(rect.x+rect.width, rect.y+rect.height));

            vector<Point> rect_update_corners = pointTransform(rect_corners, H);
            trans_rect = boundingRect(rect_update_corners);
        }
    }


    rectangle(frameGray, trans_rect, Scalar(255), 2);
    imshow("frameGray", frameGray);


    // NOTE(review): the ROI below is cropped at `rect`, not `trans_rect`; the
    // translated/homography-updated rectangle is only visualized above —
    // confirm this is intended.






    dispImg1 = (*frame)(rect).clone();
    //dispImg2 = Mat(dispImg1.rows, dispImg1.cols, CV_8UC3);

    Mat sub; //*** the rectangle region of ROI (Gray) ***
    cv::cvtColor(dispImg1, sub, CV_RGB2GRAY);
    int width = sub.cols;
    int height = sub.rows;

    vector<Point> circle_ROI; //***local coordinates of circle***
    for (unsigned int i = 0; i < cir.size(); i++){
        Point p = Point(cir[i].x - rect.x, cir[i].y - rect.y);
        circle_ROI.push_back(p);
    }

    Mat adapThreshImg1 = Mat::zeros(height, width, sub.type());
    //image edge detection for the sub region (roi rect)
    adaptiveThreshold(sub, adapThreshImg1, 255.0, ADAPTIVE_THRESH_GAUSSIAN_C,
                          CV_THRESH_BINARY_INV, blockSize, constValue);
    //imshow("adapThreshImg1", adapThreshImg1);

    // dilation and erosion
    Mat dilerod;
    dilErod(adapThreshImg1, dilerod, dilSize);

    //display image 2 -- dilerod of adaptive threshold image
    GaussianBlur(dilerod, dilerod, Size(3, 3), 2, 2 );

    //mask for filtering out the cell of interest
    Mat mask_conv = Mat::zeros(height, width, CV_8UC1);
    fillConvexPoly(mask_conv, circle_ROI, Scalar(255));
    //imshow("mask_before", mask_conv);

    //dilate the mask -> region grows
    Mat mask_conv_dil;
    // NOTE(review): Size(2*2+2, 2*2+1) is asymmetric (6x5) — the sibling
    // singleCellDetection uses Size(2*dilSize+1, 2*dilSize+1); likely a typo.
    Mat element = getStructuringElement( MORPH_ELLIPSE, Size( 2*2+2, 2*2+1 ), Point(2,2) );
    dilate(mask_conv, mask_conv_dil, element);
    //imshow("mask_dil", mask_conv_dil);



    /*
    Mat mask_conv_ero;
    erode(mask_conv, mask_conv_ero, element);
    Mat ring_dil, ring_ero;
    bitwise_xor(mask_conv, mask_conv_dil, ring_dil);
    bitwise_xor(mask_conv, mask_conv_ero, ring_ero);
    Mat ring;
    bitwise_or(ring_dil, ring_ero, ring);
    imshow("ring", ring);

    vector<unsigned int> opt_onRing_index;
    // use optflow info set rectangle
    for(unsigned int i = 0; i < points2.size(); i++){
        Point p2 = Point(points2[i].x, points2[i].y);
        Point p1 = Point(points1[i].x, points1[i].y);
        if(ring.at<uchar>(p1.y,p1.x) != 255 &&
                ring.at<uchar>(p2.y,p2.x) != 255)
            continue;
        else{
            opt_onRing_index.push_back(i);
        }
    }*/

    /*
    // draw the optflow on dispImg1
    unsigned int size = opt_inside_cl_index.size();
    for(unsigned int i = 0; i < size; i++ ){
        Point p0( ceil( points1[i].x - rect.x ), ceil( points1[i].y - rect.y ) );
        Point p1( ceil( points2[i].x - rect.x ), ceil( points2[i].y - rect.y) );
        //std::cout << "(" << p0.x << "," << p0.y << ")" << "\n";
        //std::cout << "(" << p1.x << "," << p1.y << ")" << std::endl;

        //draw lines to visualize the flow
        double angle = atan2((double) p0.y - p1.y, (double) p0.x - p1.x);
        double arrowLen = 0.01 * (double) (width);
        line(dispImg1, p0, p1, CV_RGB(255,255,255), 1 );
        Point p;
        p.x = (int) (p1.x + arrowLen * cos(angle + 3.14/4));
        p.y = (int) (p1.y + arrowLen * sin(angle + 3.14/4));
        line(dispImg1, p, p1, CV_RGB(255,255,255), 1 );
        p.x = (int) (p1.x + arrowLen * cos(angle - 3.14/4));
        p.y = (int) (p1.y + arrowLen * sin(angle - 3.14/4));

        line(dispImg1, p, Point(2*p1.x - p0.x, 2*p1.y - p0.y), CV_RGB(255,255,255), 1 );
        //line(dispImg1, p, p1, CV_RGB(255,255,255), 1 );
    }*/

/*
    //stop growing when meeting with canny edges that outside the circle

    Mat canny;
    CannyWithBlur(sub, canny);
    Mat cannyColor;
    cvtColor(canny, cannyColor, CV_GRAY2RGB);
    imwrite("canny.png", canny);
    vector<Point> outsideCircle;
    vector<Point> onRing;
    for(int j = 0; j < height; j++){
        for(int i = 0; i < width; i++){
            if(canny.at<uchar>(j,i) != 0 && mask_conv.at<uchar>(j,i) == 0){
                cannyColor.data[cannyColor.channels()*(cannyColor.cols*j + i)+0] = 81;
                cannyColor.data[cannyColor.channels()*(cannyColor.cols*j + i)+1] = 172;
                cannyColor.data[cannyColor.channels()*(cannyColor.cols*j + i)+2] = 251;
                outsideCircle.push_back(Point(i, j));
                if(ring.at<uchar>(j,i) != 0){
                    cannyColor.data[cannyColor.channels()*(cannyColor.cols*j + i)+0] = 255;
                    cannyColor.data[cannyColor.channels()*(cannyColor.cols*j + i)+1] = 255;
                    cannyColor.data[cannyColor.channels()*(cannyColor.cols*j + i)+2] = 0;
                    onRing.push_back(Point(i,j));
                }
            }
        }
    } */

//    QString cannyFileName = "canny" + QString::number(frameNum) + ".png";
//    imwrite(cannyFileName.toStdString(), cannyColor);



    //bitwise AND on mask and dilerod
    bitwise_and(mask_conv/*_dil*/, dilerod, dispImg2);

    // findcontours
    vector<vector<Point> > contours;
    vector<Vec4i> hierarchy;
    unsigned int largest_contour_index;
    dilErodContours(dispImg2, contours, hierarchy, largest_contour_index, perimeter, dispImg1);

    // find the area of the cell by counting the white area inside the largest contour
    Mat cellArea = Mat::zeros(height, width, CV_8UC1);
    drawContours(cellArea, contours, largest_contour_index, Scalar(255), -1, 8, hierarchy, 0, Point() );
    //imshow("cell", cell);
    area = countNonZero(cellArea);

    //cout << "frame " << frameNum << "\n";
    //cout << contours[largest_contour_index] << endl;


    //change dispImg2 from gray to rgb for displaying
    cvtColor(dispImg2, dispImg2, CV_GRAY2RGB);

    //renew circle points as the convex hull
    vector<Point> convHull;
    convexHull(contours[largest_contour_index], convHull);


    // find the centroid of the contour
    Moments mu = moments(contours[largest_contour_index]);
    ctroid = Point2f(mu.m10/mu.m00 + rect.x, mu.m01/mu.m00 + rect.y);

    // find the shape of the cell by the largest contour and centroid
    shape = findShape(ctroid, contours[largest_contour_index]);

    ////---- draw largest contour start ----
    //draw the largest contour
    Mat borderImg = Mat::zeros(height, width, CV_8UC1);
    drawContours(borderImg, contours, largest_contour_index, Scalar(255), 1, 8, hierarchy, 0, Point());
    //QString cellFileName0 = "border" + QString::number(frameNum) + ".png";
    //imwrite(cellFileName0.toStdString(), borderImg);
    ////---- draw largest contour end ----

    // find the number and the sizes of blebs of the cell
    Mat smooth;
    vector<Point> smoothCurve;
    int WIN = 25;
    vector< vector<Point> > tmp;
    smooth = curveSmooth(borderImg, WIN, contours[largest_contour_index], smoothCurve, convHull/*ctroid*/);
    tmp.push_back(smoothCurve);
    drawContours(dispImg1, tmp, 0, Scalar(255, 0, 0));

    // Blebs = pixels inside the cell area but outside the smoothed outline.
    bitwise_not(smooth, smooth);
    Mat blebsImg;
    bitwise_and(smooth, cellArea, blebsImg);
    //imshow("blebs", blebs);
    //QString cellFileName2 = "blebs" + QString::number(frameNum) + ".png";
    //imwrite(cellFileName2.toStdString(), blebs);

//    vector<Point> blebCtrs;
//    recursive_connected_components(blebsImg, blebs, blebCtrs);
//    for(unsigned int i = 0; i < blebCtrs.size(); i++){
//        circle(dispImg1, blebCtrs[i], 2, Scalar(255, 255, 0));
//    }


    // Replace the caller's circle with the convex hull, back in scaled coords.
    cir_org.clear();
    for(unsigned int i = 0; i < convHull.size(); i++)
        cir_org.push_back(Point((convHull[i].x + rect.x)*scale, (convHull[i].y + rect.y)*scale));

}
Beispiel #6
0
/*
 * Return the color ratio
 * @param index of he sub image
 */
double FeaturesExtractor::getRatioColor(int index) {
	return (double) countNonZero(binaryBox[index]) / ((double) binaryBox[index].rows*binaryBox[index].cols);
}
 /// Hash functor: hashes an element by the count of its non-zero entries.
 std::size_t operator()(const T & element) const
 {
     const int nonZeroCount = countNonZero(element);
     return static_cast<std::size_t>(nonZeroCount);
 }
Beispiel #8
0
/**
 * Find a list of candidate markers in a given scene.
 *
 * Pipeline: global threshold -> contour extraction -> polygon approximation
 * and candidate check -> perspective rectification of each candidate -> black
 * border validation -> rotation-invariant code extraction.
 *
 * @param frame current frame, in grayscale 8UC1 format
 * @return a list of marker candidates with their polygon, rectified matrix
 *         and rotation-normalized code
 **/
vector<Marker> MarkerDetector::findMarkerCandidates( Mat& frame ) {
    vector<Marker> candidates;
    
    /* Do some thresholding, in fact you should tune the parameters here a bit */
    Mat thresholded;
    threshold( frame, thresholded, 50.0, 255.0, CV_THRESH_BINARY );
    
    /* Find contours */
    vector<vector<Point>> contours;
    findContours( thresholded.clone(), contours, CV_RETR_LIST, CV_CHAIN_APPROX_NONE );
    
    for( vector<Point> contour: contours ) {
        /* Approximate polygons out of these contours */
        vector<Point> approxed;
        approxPolyDP( contour, approxed, contour.size() * 0.05, true );
        
        /* Make sure it passes our first candidate check */
        if( !checkPoints( approxed ) )
            continue;
        
        /* Do some perspective transformation on the candidate marker to a predetermined square */
        Marker marker;
        marker.matrix = Mat( markerHeight, markerWidth, CV_8UC1 );
        std::copy( approxed.begin(), approxed.end(), back_inserter( marker.poly ) );
        
        /* Apply sub pixel search */
        cornerSubPix( thresholded, marker.poly, Size(5, 5), Size(-1, -1), TermCriteria(CV_TERMCRIT_EPS + CV_TERMCRIT_ITER, 40, 0.001) );
        
        /* Projection target: half-pixel offsets so cell centers land on integer coords */
        const static vector<Point2f> target_corners = {
            Point2f( -0.5f, -0.5f ),
            Point2f( +5.5f, -0.5f ),
            Point2f( +5.5f, +5.5f ),
            Point2f( -0.5f, +5.5f ),
        };
        
        /* Apply perspective transformation, to project our 3D marker to a predefined 2D coords */
        Mat projection = getPerspectiveTransform( marker.poly, target_corners );
        warpPerspective( thresholded, marker.matrix, projection, marker.matrix.size() );
        
        /* Ignore those region that's fully black, or not surrounded by black bars */
        if( sum(marker.matrix) == Scalar(0) ||
           countNonZero( marker.matrix.row(0)) != 0 ||
           countNonZero( marker.matrix.row(markerHeight - 1)) != 0 ||
           countNonZero( marker.matrix.col(0)) != 0 ||
           countNonZero( marker.matrix.col(markerWidth - 1)) != 0 ) {
            continue;
        }
        
        
        /* Find the rotation that has the smallest hex value */
        /* minimum = (smallest code seen so far, rotation index that produced it) */
        pair<unsigned int, unsigned int> minimum = { numeric_limits<unsigned int>::max(), 0 };
        vector<unsigned int> codes(markerHeight);
        unsigned int power = 1 << (markerWidth - 3);
        
        /* Rotate the marker 4 times, store the hex code upon each rotation */
        for( int rotation = 0; rotation < 4; rotation++ ) {
            stringstream ss;
            codes[rotation] = 0;
            
            /* Encode each interior row: black cells set bits, MSB first */
            for( int i = 1; i < markerHeight - 1; i++ ) {
                unsigned int code = 0;
                for ( int j = 1; j < markerWidth - 1; j++ ){
                    int value = static_cast<int>(marker.matrix.at<uchar>(i, j));
                    if( value == 0 )
                        code = code + ( power >> j );
                }
                
                ss << hex << code;
            }
            /* Concatenated hex digits of all rows parsed back into one number */
            ss >> codes[rotation];
            
            if( minimum.first > codes[rotation] ) {
                minimum.first  = codes[rotation];
                minimum.second = rotation;
            }
            
            /* flip + transpose = rotate the matrix by 90 degrees */
            flip( marker.matrix, marker.matrix, 1 );
            marker.matrix = marker.matrix.t();
        }
        
        
        /* Normalize polygon order and matrix orientation to the minimal-code rotation */
        rotate( marker.poly.begin(), marker.poly.begin() + ((minimum.second + 2) % 4), marker.poly.end() );
        for( int i = 0; i < minimum.second; i++ ) {
            flip( marker.matrix, marker.matrix, 1 );
            marker.matrix = marker.matrix.t();
        }
        
        marker.code = minimum.first;
        
        candidates.push_back( marker );
    }
    
    return candidates;
}
Beispiel #9
0
// Dense patch-based object matching over an RGB(-D) frame.
// For every edge pixel (on a stride-3 grid), extracts a patch around it,
// retrieves the topK nearest database patches (LSH codes or raw features),
// accumulates their masks/distances, and produces a per-pixel object-mask map.
//
// Params:
//   cimg     - input BGR color image.
//   dmap_raw - raw depth map; only used when member flag use_depth is set.
//   mask_map - output (CV_32F, same size as cimg): accumulated, normalized
//              mask votes weighted by best-match distance.
// Returns: always true (the function has no failure path before the first
//          unconditional return).
bool ObjPatchMatcher::Match(const Mat& cimg, const Mat& dmap_raw, Mat& mask_map) {

	/*
	 * precompute feature maps
	 */
	// gradient
	Mat gray_img, gray_img_float, edge_map;
	cvtColor(cimg, gray_img, CV_BGR2GRAY);
	gray_img.convertTo(gray_img_float, CV_32F, 1.f/255);
	Canny(gray_img, edge_map, 10, 50);	// edge pixels define the query locations
	cv::imshow("edge", edge_map);
	cv::imshow("color", cimg);
	cv::waitKey(10);

	Mat grad_x, grad_y, grad_mag;
	Sobel(gray_img_float, grad_x, CV_32F, 1, 0);
	Sobel(gray_img_float, grad_y, CV_32F, 0, 1);
	magnitude(grad_x, grad_y, grad_mag);

	// depth: clamp raw depth to [800, 7000] (presumably millimeters for a
	// Kinect sensor — TODO confirm) and rescale to [0,1] before computing
	// 3D points and normals.
	Mat dmap_float, pts3d, normal_map;
	if( use_depth ) {
		Feature3D feat3d;
		dmap_raw.convertTo(dmap_float, CV_32F);
		Mat cmp_mask;
		compare(dmap_float, 800, cmp_mask, CMP_LT);
		dmap_float.setTo(800, cmp_mask);
		compare(dmap_float, 7000, cmp_mask, CMP_GT);
		dmap_float.setTo(7000, cmp_mask);
		dmap_float = (dmap_float-800)/(7000-800);

		feat3d.ComputeKinect3DMap(dmap_float, pts3d, false);
		feat3d.ComputeNormalMap(pts3d, normal_map);
	}

	/*
	 *	start searching
	 */
	// init searcher
	//searcher.Build(patch_data, BruteForce_L2);	// opencv bfmatcher has size limit: maximum 2^31
	LSHCoder lsh_coder;
	if(use_code) {
		lsh_coder.Load();
	}
	
	Mat score_map = Mat::zeros(edge_map.rows, edge_map.cols, CV_32F);
	Mat mask_vote_map = Mat::zeros(cimg.rows, cimg.cols, CV_32F);
	mask_map = Mat::zeros(cimg.rows, cimg.cols, CV_32F);
	Mat mask_count = Mat::zeros(cimg.rows, cimg.cols, CV_32S);	// number of mask overlapped on each pixel
	Mat feat;
	int topK = 40;	// number of nearest database patches retrieved per query
	int total_cnt = countNonZero(edge_map);
	vector<VisualObject> query_patches;
	query_patches.reserve(total_cnt);

	cout<<"Start match..."<<endl;
	
	float max_dist = 0;
	int cnt = 0;
	char str[30];
	double start_t = getTickCount();
//#pragma omp parallel for
	// Scan the image on a stride-3 grid, keeping the patch fully inside the frame.
	for(int r=patch_size.height/2; r<gray_img.rows-patch_size.height/2; r+=3) {
		for(int c=patch_size.width/2; c<gray_img.cols-patch_size.width/2; c+=3) {

			/*int rand_r = rand()%gray_img.rows;
			int rand_c = rand()%gray_img.cols;
			if(rand_r < patch_size.height/2 || rand_r > gray_img.rows-patch_size.height/2 ||
				rand_c < patch_size.width/2 || rand_c > gray_img.cols-patch_size.width/2) continue;*/
			int rand_r = r, rand_c = c;	// leftover names from the disabled random-sampling variant above

			// Only query at edge pixels.
			if(edge_map.at<uchar>(rand_r, rand_c) > 0) 
			{
				cnt++;
				// NOTE(review): closing all HighGUI windows once per edge pixel
				// looks unintentional (likely debug leftover) — confirm.
				destroyAllWindows();

				// Patch window centered at (rand_r, rand_c).
				Rect box(rand_c-patch_size.width/2, rand_r-patch_size.height/2, patch_size.width, patch_size.height);
				MatFeatureSet featset;
				gray_img_float(box).copyTo(featset["gray"]);
				//grad_mag(box).copyTo(featset["gradient"]);
				if(use_depth)
				{ 
					normal_map(box).copyTo(featset["normal"]);
					dmap_float(box).copyTo(featset["depth"]);
				}
				ComputePatchFeat(featset, feat);
				vector<DMatch> matches;
				// Retrieve topK nearest patches: hashed codes when use_code,
				// otherwise direct feature matching.
				if(use_code) 
				{
					BinaryCodes codes;
					HashKey key_val;
					lsh_coder.ComputeCodes(feat, codes);
					HashingTools<HashKeyType>::CodesToKey(codes, key_val);
					MatchCode(key_val, topK, matches);
				}
				else
				{
					MatchPatch(feat, topK, matches);
				}
				
				// Diagnostic dump for implausible best-match distances.
				// NOTE(review): matches[0] is accessed without checking that
				// the matcher returned any results — confirm MatchCode/MatchPatch
				// always yield topK entries.
				if(matches[0].distance < 0 || matches[0].distance > 1000) {
					cout<<"match dist: "<<matches[0].distance<<endl;
					double minv, maxv;
					cout<<norm(feat, patch_data.row(matches[0].trainIdx), NORM_L2)<<endl;
					minMaxLoc(feat, &minv, &maxv);
					cout<<minv<<" "<<maxv<<endl;
					cout<<feat<<endl<<endl;
					minMaxLoc(patch_data.row(matches[0].trainIdx), &minv, &maxv);
					cout<<minv<<" "<<maxv<<endl;
					cout<<patch_data.row(matches[0].trainIdx)<<endl;
					imshow("cimg", cimg);
					waitKey(0);
				}
				// NOTE(review): the vector fill-constructor copies one Mat header,
				// so every pixel_mask_vals[mr][mc] shares the SAME underlying
				// buffer (cv::Mat copies are shallow). Harmless today because the
				// only consumer below is commented out, but fix before reviving it.
				vector<vector<Mat>> pixel_mask_vals(patch_size.height, vector<Mat>(patch_size.width, Mat::zeros(1, topK, CV_32F)));
				VisualObject cur_query;
				cur_query.visual_data.bbox = box;
				cur_query.visual_data.mask = Mat::zeros(patch_size.height, patch_size.width, CV_32F);
				// Accumulate distance score and average the topK retrieved masks.
				// (size_t i vs int topK: signed/unsigned compare — benign here.)
				for(size_t i=0; i<topK; i++) { 
					score_map.at<float>(rand_r,rand_c) += matches[i].distance;
					cur_query.visual_data.mask += patch_meta.objects[matches[i].trainIdx].visual_data.mask;
					for(int mr=0; mr<patch_size.height; mr++) for(int mc=0; mc<patch_size.width; mc++) {
						pixel_mask_vals[mr][mc].at<float>(i) = 
							patch_meta.objects[matches[i].trainIdx].visual_data.mask.at<float>(mr, mc);
					}
				}
				score_map.at<float>(rand_r,rand_c) /= topK;
				cur_query.visual_data.mask /= topK;			// average returned mask
				
				// compute mask quality
				Scalar mean_, std_;
				/*ofstream out("pixel_mask_std_100.txt", ios::app);
				for(int mr=0; mr<patch_size.height; mr++) for(int mc=0; mc<patch_size.width; mc++) {
				meanStdDev(pixel_mask_vals[mr][mc], mean_, std_);
				out<<std_.val[0]<<" ";
				}
				out<<endl;*/
				// scores[0] = mask mean, scores[1] = mask std (used for sorting below).
				meanStdDev(cur_query.visual_data.mask, mean_, std_);
				cur_query.visual_data.scores.push_back(mean_.val[0]);
				cur_query.visual_data.scores.push_back(std_.val[0]);

				// Project the best match's ground-truth object mask into this
				// frame, aligned to the query box.
				Mat align_mask = Mat::zeros(cimg.rows, cimg.cols, CV_8U);
				int gt_mask_id = patch_meta.objects[matches[0].trainIdx].meta_data.category_id;
				if(gt_mask_id != -1) {
					Mat nn_mask = gt_obj_masks[gt_mask_id];
					//imshow("gt mask", nn_mask*255);
					//waitKey(10);
					Rect gt_box = patch_meta.objects[matches[0].trainIdx].visual_data.bbox;
					Rect align_box = AlignBox(box, gt_box, cimg.cols, cimg.rows);
					vector<ImgWin> boxes; boxes.push_back(align_box);
					//ImgVisualizer::DrawWinsOnImg("alignbox", cimg, boxes);
					//waitKey(10);
					Rect target_box = Rect(box.x-(gt_box.x-align_box.x), box.y-(gt_box.y-align_box.y), align_box.width, align_box.height);
					cout<<target_box<<endl;
					nn_mask(align_box).copyTo(align_mask(target_box));
				}
				align_mask.convertTo(align_mask, CV_32F);
				// Weight the aligned mask by the best-match distance; normalized
				// by max_dist after the scan.
				mask_map += align_mask * matches[0].distance;	//*score_map.at<float>(r,c);
				//mask_count(box) = mask_count(box) + 1;

				//cout<<score_map.at<float>(r,c)<<endl;
				max_dist = MAX(max_dist, matches[0].distance);
				query_patches.push_back(cur_query);

				// vote object regions
				/*Point3f line_ori;
				int obj_pt_sign;
				ComputeDominantLine(cur_query.visual_desc.mask, box.tl(), line_ori, obj_pt_sign);
				for(int rr=0; rr<cimg.rows; rr++) for(int cc=0; cc<cimg.cols; cc++) {
				float line_val = line_ori.x*cc+line_ori.y*rr+line_ori.z;
				if((line_val>0?1:-1)==obj_pt_sign) mask_vote_map.at<float>(rr, cc)++;
				}*/

#ifdef VERBOSE
				// Debug visualization: dump the query patch and its topK matches
				// to windows and JPEG files.
				// NOTE(review): this block uses member name visual_desc while the
				// live code above uses visual_data — likely stale; it will not
				// compile if VERBOSE is defined. sprintf_s is also MSVC-specific.

				// current patch
				Mat disp, patch_gray, patch_grad, patch_normal, patch_depth;
				disp = cimg.clone();
				rectangle(disp, box, CV_RGB(255,0,0), 2);
				resize(gray_img(box), patch_gray, Size(50,50));
				resize(grad_mag(box), patch_grad, Size(50,50));
				Mat cur_mask;
				resize(cur_query.visual_desc.mask, cur_mask, Size(50,50));
				if(use_depth) 
				{
					resize(normal_map(box), patch_normal, Size(50,50));

					normalize(dmap_float(box), patch_depth, 1, 0, NORM_MINMAX);
					patch_depth.convertTo(patch_depth, CV_8U, 255);
					//dmap_float(box).convertTo(patch_depth, CV_8U, 255);
					resize(patch_depth, patch_depth, Size(50,50));
				}

				Mat onormal;
				sprintf_s(str, "query_gray_%d.jpg", cnt);
				imshow(str, patch_gray);
				imwrite(str, patch_gray);

				/*sprintf_s(str, "query_grad_%d.jpg", cnt);
				ImgVisualizer::DrawFloatImg(str, patch_grad, onormal, true);
				imwrite(str, onormal);*/
				
				sprintf_s(str, "query_depth_%d.jpg", cnt);
				imshow(str, patch_depth);
				imwrite(str, patch_depth);
				
				sprintf_s(str, "query_normal_%d.jpg", cnt);
				ImgVisualizer::DrawNormals(str, patch_normal, onormal, true);
				imwrite(str, onormal);

				sprintf_s(str, "query_box_%d.jpg", cnt);
				imshow(str, disp);
				imwrite(str, disp);

				//imshow("align mask", align_mask*255);

				cur_mask.convertTo(cur_mask, CV_8U, 255);
				sprintf_s(str, "query_tmask_%d.jpg", cnt);
				imshow(str, cur_mask);
				imwrite(str, cur_mask);

				// show match results
				vector<Mat> res_imgs(topK);
				vector<Mat> res_gradients(topK);
				vector<Mat> res_normals(topK);
				vector<Mat> res_depth(topK);
				vector<Mat> db_boxes(topK);
				vector<Mat> res_masks(topK);
				for(size_t i=0; i<topK; i++) {
					VisualObject& cur_obj = patch_meta.objects[matches[i].trainIdx];
					// mask
					cur_obj.visual_desc.mask.convertTo(res_masks[i], CV_8U, 255);
					// gray
					cur_obj.visual_desc.extra_features["gray"].convertTo(res_imgs[i], CV_8U, 255);
					// gradient
					//ImgVisualizer::DrawFloatImg("", cur_obj.visual_desc.extra_features["gradient"], res_gradients[i], false);
					// 3D
					if(use_depth) 
					{
						// normal
						tools::ImgVisualizer::DrawNormals("", cur_obj.visual_desc.extra_features["normal"], res_normals[i]);
						// depth
						normalize(cur_obj.visual_desc.extra_features["depth"], res_depth[i], 1, 0, NORM_MINMAX);
						res_depth[i].convertTo(res_depth[i], CV_8U, 255);
						//cur_obj.visual_desc.extra_features["depth"].convertTo(res_depth[i], CV_8U, 255);
					}
					// box on image
					db_boxes[i] = imread(patch_meta.objects[matches[i].trainIdx].imgpath);
					resize(db_boxes[i], db_boxes[i], Size(cimg.cols, cimg.rows));
					rectangle(db_boxes[i], patch_meta.objects[matches[i].trainIdx].visual_desc.box, CV_RGB(255,0,0), 2);
				}
				Mat out_img;
				sprintf_s(str, "res_gray_%d.jpg", cnt);
				ImgVisualizer::DrawImgCollection(str, res_imgs, topK, Size(50,50), out_img);
				imwrite(str, out_img);
				
				sprintf_s(str, "res_normal_%d.jpg", cnt);
				ImgVisualizer::DrawImgCollection(str, res_normals, topK, Size(50,50), out_img);
				imwrite(str, out_img);

				sprintf_s(str, "res_depth_%d.jpg", cnt);
				ImgVisualizer::DrawImgCollection(str, res_depth, topK, Size(50,50), out_img);
				imwrite(str, out_img);

				/*sprintf_s(str, "res_gradient_%d.jpg", cnt);
				tools::ImgVisualizer::DrawImgCollection(str, res_gradients, topK, Size(50,50), out_img);
				imwrite(str, out_img);*/

				sprintf_s(str, "res_mask_%d.jpg", cnt);
				tools::ImgVisualizer::DrawImgCollection(str, res_masks, topK, Size(50,50), out_img);
				imwrite(str, out_img);

				sprintf_s(str, "res_box_%d.jpg", cnt);
				tools::ImgVisualizer::DrawImgCollection(str, db_boxes, topK/2, Size(200, 200), out_img);
				imwrite(str, out_img);

				waitKey(0);
#endif

				cout<<total_cnt--<<endl;	// countdown of remaining edge pixels
			}
		}
	}
	cout<<"match done. Time cost: "<<(getTickCount()-start_t)/getTickFrequency()<<"s."<<endl;

	//score_map(Rect(patch_size.width/2, patch_size.height/2, score_map.cols-patch_size.width/2, score_map.rows-patch_size.height/2)).copyTo(score_map);
	//score_map.setTo(max_dist, 255-edge_map);
	// Invert normalized distances so higher score = better match.
	normalize(score_map, score_map, 1, 0, NORM_MINMAX);
	score_map = 1-score_map;
	//tools::ImgVisualizer::DrawFloatImg("bmap", score_map);

	mask_map /= max_dist;
	cout<<max_dist<<endl;
	normalize(mask_map, mask_map, 1, 0, NORM_MINMAX);
	//tools::ImgVisualizer::DrawFloatImg("maskmap", mask_map);

	//normalize(mask_vote_map, mask_vote_map, 1, 0, NORM_MINMAX);
	//ImgVisualizer::DrawFloatImg("vote map", mask_vote_map);
	//waitKey(0);

	// NOTE(review): everything below this return is unreachable experimental
	// code (grab-cut segmentation driven by the highest-variance query masks,
	// then mask thresholding sweeps). Kept deliberately disabled, it seems —
	// confirm before deleting or re-enabling.
	return true;

	// pick top weighted points to see if they are inside objects
	// try graph-cut for region proposal
	// among all retrieved mask patch, select most discriminative one and do graph-cut
	sort(query_patches.begin(), query_patches.end(), [](const VisualObject& a, const VisualObject& b) { 
		return a.visual_data.scores[1] > b.visual_data.scores[1]; });
	for(size_t i=0; i<query_patches.size(); i++) {
		Mat disp_img = cimg.clone();
		rectangle(disp_img, query_patches[i].visual_data.bbox, CV_RGB(255,0,0));
		imshow("max std box", disp_img);
		Mat big_mask;
		resize(query_patches[i].visual_data.mask, big_mask, Size(50,50));
		ImgVisualizer::DrawFloatImg("max std mask", big_mask);
		waitKey(0);
		// use mask to do graph-cut
		Mat fg_mask(cimg.rows, cimg.cols, CV_8U);
		fg_mask.setTo(cv::GC_PR_FGD);
		Mat th_mask;
		threshold(query_patches[i].visual_data.mask, th_mask, query_patches[i].visual_data.scores[0], 1, CV_THRESH_BINARY);
		th_mask.convertTo(th_mask, CV_8U);
		fg_mask(query_patches[i].visual_data.bbox).setTo(cv::GC_FGD, th_mask);
		th_mask = 1-th_mask;
		fg_mask(query_patches[i].visual_data.bbox).setTo(cv::GC_BGD, th_mask);
		cv::grabCut(cimg, fg_mask, Rect(0,0,1,1), Mat(), Mat(), 3, cv::GC_INIT_WITH_MASK);
		fg_mask = fg_mask & 1;
		disp_img.setTo(Vec3b(0,0,0));
		cimg.copyTo(disp_img, fg_mask);
		cv::imshow("cut", disp_img);
		cv::waitKey(0);
	}


	float ths[] = {0.9f, 0.8f, 0.7f, 0.6f, 0.5f, 0.4f, 0.3f, 0.2f};
	for(size_t i=0; i<8; i++) {
		Mat th_mask;
		threshold(mask_map, th_mask, ths[i], 1, CV_THRESH_BINARY);
		char str[30];
		sprintf_s(str, "%f", ths[i]);
		ImgVisualizer::DrawFloatImg(str, th_mask);
		waitKey(0);
	}

	return true;
}
Beispiel #10
0
/* Find cell soma.
 *
 * Builds an elliptical "region of influence" around the nucleus (the
 * nucleus' min-area rect scaled by SOMA_FACTOR, minus the nucleus itself),
 * intersects it with the cell mask, and — if the coverage ratio exceeds
 * SOMA_COVERAGE_RATIO — segments the intersection and returns the largest
 * admissible contour as the soma.
 *
 * Params:
 *   nucleus_contour - contour points of the nucleus.
 *   cell_mask       - binary mask (CV_8UC1) of candidate cell pixels.
 *   intersection    - out: ROI ∩ cell_mask, with the nucleus ellipse added.
 *   soma_contour    - out: largest valid soma contour (set only on success).
 * Returns: true when a soma contour satisfying the size/area thresholds
 *          was found, false otherwise.
 */
bool findCellSoma( std::vector<cv::Point> nucleus_contour, 
                   cv::Mat cell_mask, 
                   cv::Mat *intersection, 
                   std::vector<cv::Point> *soma_contour ) {

    bool status = false;

    // Calculate the min bounding rectangle once; the scaled copy defines the
    // outer boundary of the region of influence.
    cv::RotatedRect min_area_rect = minAreaRect(cv::Mat(nucleus_contour));
    cv::RotatedRect scaled_rect   = min_area_rect;

    // Nucleus' region of influence: scaled ellipse minus the nucleus ellipse.
    cv::Mat roi_mask = cv::Mat::zeros(cell_mask.size(), CV_8UC1);
    scaled_rect.size.width  = (float)(SOMA_FACTOR * scaled_rect.size.width);
    scaled_rect.size.height = (float)(SOMA_FACTOR * scaled_rect.size.height);
    ellipse(roi_mask, scaled_rect, 255, -1, 8);
    ellipse(roi_mask, min_area_rect, 0, -1, 8);
    int mask_score = countNonZero(roi_mask);

    // Soma present in ROI
    bitwise_and(roi_mask, cell_mask, *intersection);
    int intersection_score = countNonZero(*intersection);

    // Add the nucleus contour to intersection region
    ellipse(*intersection, min_area_rect, 255, -1, 8);

    // Degenerate ROI (zero-area ring): previously produced a NaN ratio that
    // compared false; make the no-soma outcome explicit and avoid the
    // division by zero. *intersection has already been populated above, so
    // caller-visible side effects are unchanged.
    if (mask_score == 0) {
        return status;
    }

    // Add to the soma mask if coverage area exceeds a certain threshold
    float ratio = ((float) intersection_score) / mask_score;
    if (ratio >= SOMA_COVERAGE_RATIO) {

        // Segment
        cv::Mat soma_segmented;
        std::vector<std::vector<cv::Point>> contours_soma;
        std::vector<cv::Vec4i> hierarchy_soma;
        std::vector<HierarchyType> soma_contour_mask;
        std::vector<double> soma_contour_area;
        contourCalc(    *intersection, 
                        1.0, 
                        &soma_segmented, 
                        &contours_soma, 
                        &hierarchy_soma, 
                        &soma_contour_mask, 
                        &soma_contour_area
                   );

        double max_area  = 0.0;
        for (size_t i = 0; i < contours_soma.size(); i++) {
            // Only top-level contours with enough points (>= 5, the minimum
            // for ellipse fitting downstream) and sufficient area qualify.
            if (soma_contour_mask[i] != HierarchyType::PARENT_CNTR) continue;
            if (contours_soma[i].size() < 5) continue;
            if (soma_contour_area[i] < MIN_SOMA_SIZE) continue;

            // Find the largest permissible contour
            if (soma_contour_area[i] > max_area) {
                max_area = soma_contour_area[i];
                *soma_contour = contours_soma[i];
                status = true;
            }
        }
    }
    return status;
}