static void
get_histogram (IplImage * src, CvPoint point, CvSize size,
    CvHistogram * histogram)
{
  IplImage *srcAux =
      cvCreateImage (cvSize (size.width, size.height), IPL_DEPTH_8U, 3);
  cvSetImageROI (src, cvRect (point.x, point.y, size.width, size.height));
  cvCopy (src, srcAux, 0);
  cvResetImageROI (src);        /* restore the full-image ROI on the source */
  calc_histogram (srcAux, histogram);
  cvReleaseImage (&srcAux);
}
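A minimal calling sketch for the helper above, assuming an 8-bit, 3-channel frame (here a placeholder named frame) and a caller-allocated CvHistogram whose layout matches whatever calc_histogram expects; the 16x16x16 bins are illustrative only:

/* Hypothetical caller: histogram of a 50x50 region starting at (10, 10). */
int bins[] = { 16, 16, 16 };
CvHistogram *hist = cvCreateHist (3, bins, CV_HIST_ARRAY, NULL, 1);

get_histogram (frame, cvPoint (10, 10), cvSize (50, 50), hist);
/* ... use the histogram, e.g. compare it against a reference with cvCompareHist() ... */

cvReleaseHist (&hist);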
Example #2
/**
 * @brief encapsulation of cv::calcHist; the parameters have the same meaning as in cv::calcHist, but this API
 * is easier to use. ex : cv::Mat hist = calc_histogram({hsv}, {0, 1}, {180, 256}, { {0, 180}, {0, 256} });
 *
 * @return histogram
 */
inline
cv::Mat calc_histogram(std::initializer_list<cv::Mat> images, std::initializer_list<int> channels,
                       std::initializer_list<int> hist_sizes, std::initializer_list<float[2]> ranges,
                       cv::InputArray mask = cv::Mat(), bool uniform = true,
                       bool accumulate = false)
{
    cv::Mat output;
    calc_histogram(images, output, channels, hist_sizes, ranges, mask, uniform, accumulate);

    return output;
}
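A short usage sketch built on the example from the comment above; "input.png" is a placeholder path, and the void calc_histogram overload that this wrapper delegates to is assumed to be available:

// Hypothetical caller: 2-D hue/saturation histogram of an HSV image.
cv::Mat img = cv::imread("input.png");          // placeholder input
cv::Mat hsv;
cv::cvtColor(img, hsv, cv::COLOR_BGR2HSV);

cv::Mat hist = calc_histogram({hsv}, {0, 1}, {180, 256},
                              { {0, 180}, {0, 256} });
cv::normalize(hist, hist, 0, 1, cv::NORM_MINMAX);   // optional: scale bins to [0, 1]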
Example #3
/**
  gets observations (histograms) for sampled particles and writes them into an array without receiving a new frame

  D E B U G   F U N C T I O N

  @param sampled_particles: pointer to array with sampled particles
  @param number_of_particles: number of particles
  @param observations: pointer to array of observations (histograms)
*/
void get_observations_without_new_frame ( particle * sampled_particles, int number_of_particles, histogram * observations) {

    int i,x1,x2,y1,y2;
    histogram * tmp = observations;


    // 1) read frame
    read_frame();

    // 2) create hsv image
    convert_bgr2hsv();

    // 3) for every particle: calculate histogram
    for (i=0; i<number_of_particles; i++) {

        // calculate start and end position of region
        x1 = (sampled_particles->x / PF_GRANULARITY)  - ((sampled_particles->s * (( sampled_particles->width - 1) / 2)) / PF_GRANULARITY);
        x2 = (sampled_particles->x / PF_GRANULARITY)  + ((sampled_particles->s * (( sampled_particles->width - 1) / 2)) / PF_GRANULARITY);

        y1 = (sampled_particles->y / PF_GRANULARITY)  - ((sampled_particles->s * (( sampled_particles->height - 1) / 2)) / PF_GRANULARITY);
        y2 = (sampled_particles->y / PF_GRANULARITY)  + ((sampled_particles->s * (( sampled_particles->height - 1) / 2)) / PF_GRANULARITY);

        // correct positions, if needed
        if (x1<0) {
            x1 = 0;
        }
        if (y1<0) {
            y1 = 0;
        }
        if (x2>SIZE_X-1) {
            x2 = SIZE_X - 1;
        }
        if (y2>SIZE_Y-1) {
            y2 = SIZE_Y - 1;
        }

        // calculate histogram
        calc_histogram(hsvImage, x1, y1, x2, y2, tmp);
        //calc_histogram(x1, y1, x2, y2, tmp);

        //4) normalize histogram
        normalize_histogram (tmp);

        // next histogram
        tmp++;
        sampled_particles++;

    }
}
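A hedged calling sketch for the function above, assuming the particle and histogram structs used elsewhere in this tracker and an already-resampled particle array:

/* Hypothetical caller: one histogram slot per sampled particle. */
histogram *observations =
    (histogram *) malloc (number_of_particles * sizeof (histogram));

get_observations_without_new_frame (sampled_particles, number_of_particles,
                                    observations);

/* ... weight each particle by comparing observations[i] with the
   reference histogram ... */
free (observations);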
int main( int argc, char** argv )
{
  IplImage* hsv_img;
  IplImage** hsv_ref_imgs;
  IplImage* l32f, * l;
  histogram* ref_histo;
  double max;
  int i;

  arg_parse( argc, argv );

  /* compute HSV histogram over all reference images */
  hsv_img = bgr2hsv( in_img );
  hsv_ref_imgs = (IplImage**)malloc( num_ref_imgs * sizeof( IplImage* ) );
  for( i = 0; i < num_ref_imgs; i++ )
    hsv_ref_imgs[i] = bgr2hsv( ref_imgs[i] );
  ref_histo = calc_histogram( hsv_ref_imgs, num_ref_imgs );
  normalize_histogram( ref_histo );

  /* compute likelihood at every pixel in input image */
  fprintf( stderr, "Computing likelihood... " );
  fflush( stderr );
  l32f = likelihood_image( hsv_img, ref_imgs[0]->width,
			   ref_imgs[0]->height, ref_histo );
  fprintf( stderr, "done\n");

  /* convert likelihood image to uchar and display */
  cvMinMaxLoc( l32f, NULL, &max, NULL, NULL, NULL );
  l = cvCreateImage( cvGetSize( l32f ), IPL_DEPTH_8U, 1 );
  cvConvertScale( l32f, l, 255.0 / max, 0 );
  cvNamedWindow( "likelihood", 1 );
  cvShowImage( "likelihood", l );
  cvNamedWindow( "image", 1 );
  cvShowImage( "image", in_img );
  cvWaitKey(0);
}
int main (int argc, char** argv)
{

  //ROS Initialization
  ros::init(argc, argv, "detecting_people");
  ros::NodeHandle nh;
  ros::Rate rate(13);

  ros::Subscriber state_sub = nh.subscribe("followme_state", 5, &stateCallback);
  ros::Publisher people_pub = nh.advertise<frmsg::people>("followme_people", 5);
  frmsg::people pub_people_;

  CloudConverter* cc_ = new CloudConverter();

  while (!cc_->ready_xyzrgb_)
  {
    ros::spinOnce();
    rate.sleep();
    if (!ros::ok())
    {
      printf("Terminated by Control-c.\n");
      return -1;
    }
  }

  // Read input parameters from the .yaml file
  std::string package_path_ = ros::package::getPath("detecting_people") + "/";
  cv::FileStorage* fs_ = new cv::FileStorage(package_path_ + "parameters.yml", cv::FileStorage::READ);

  // Algorithm parameters:
  std::string svm_filename = package_path_ + "trainedLinearSVMForPeopleDetectionWithHOG.yaml";
  std::cout << svm_filename << std::endl;

  float min_confidence = -1.5;
  float min_height = 1.3;
  float max_height = 2.3;
  float voxel_size = 0.06;
  Eigen::Matrix3f rgb_intrinsics_matrix;
  rgb_intrinsics_matrix << 525, 0.0, 319.5, 0.0, 525, 239.5, 0.0, 0.0, 1.0; // Kinect RGB camera intrinsics
  
  // Override defaults with parameters passed on the command line, if any:
  pcl::console::parse_argument (argc, argv, "--svm", svm_filename);
  pcl::console::parse_argument (argc, argv, "--conf", min_confidence);
  pcl::console::parse_argument (argc, argv, "--min_h", min_height);
  pcl::console::parse_argument (argc, argv, "--max_h", max_height);


  // Read Kinect live stream:
  PointCloudT::Ptr cloud_people (new PointCloudT);
  cc_->ready_xyzrgb_ = false;
  while ( !cc_->ready_xyzrgb_ )
  {
    ros::spinOnce();
    rate.sleep();
  }
  pcl::PointCloud<pcl::PointXYZRGB>::ConstPtr cloud = cc_->msg_xyzrgb_;

  // Display pointcloud:
  pcl::visualization::PointCloudColorHandlerRGBField<PointT> rgb(cloud);
  viewer.addPointCloud<PointT> (cloud, rgb, "input_cloud");
  viewer.setCameraPosition(0,0,-2,0,-1,0,0);

  // Add point picking callback to viewer:
  struct callback_args cb_args;
  PointCloudT::Ptr clicked_points_3d (new PointCloudT);
  cb_args.clicked_points_3d = clicked_points_3d;
  cb_args.viewerPtr = pcl::visualization::PCLVisualizer::Ptr(&viewer);
  viewer.registerPointPickingCallback (pp_callback, (void*)&cb_args);
  std::cout << "Shift+click on three floor points, then press 'Q'..." << std::endl;

  // Spin until 'Q' is pressed:
  viewer.spin();
  std::cout << "done." << std::endl;
  
  //cloud_mutex.unlock ();    

  // Ground plane estimation:
  Eigen::VectorXf ground_coeffs;
  ground_coeffs.resize(4);
  std::vector<int> clicked_points_indices;
  for (unsigned int i = 0; i < clicked_points_3d->points.size(); i++)
    clicked_points_indices.push_back(i);
  pcl::SampleConsensusModelPlane<PointT> model_plane(clicked_points_3d);
  model_plane.computeModelCoefficients(clicked_points_indices,ground_coeffs);
  std::cout << "Ground plane: " << ground_coeffs(0) << " " << ground_coeffs(1) << " " << ground_coeffs(2) << " " << ground_coeffs(3) << std::endl;

  // Initialize new viewer:
  pcl::visualization::PCLVisualizer viewer("PCL Viewer");          // viewer initialization
  viewer.setCameraPosition(0,0,-2,0,-1,0,0);

  // Create classifier for people detection:  
  pcl::people::PersonClassifier<pcl::RGB> person_classifier;
  person_classifier.loadSVMFromFile(svm_filename);   // load trained SVM

  // People detection app initialization:
  pcl::people::GroundBasedPeopleDetectionApp<PointT> people_detector;    // people detection object
  people_detector.setVoxelSize(voxel_size);                        // set the voxel size
  people_detector.setIntrinsics(rgb_intrinsics_matrix);            // set RGB camera intrinsic parameters
  people_detector.setClassifier(person_classifier);                // set person classifier
  people_detector.setHeightLimits(min_height, max_height);         // set person height limits
//  people_detector.setSensorPortraitOrientation(true);             // set sensor orientation to vertical

  // For timing:
  static unsigned count = 0;
  static double last = pcl::getTime ();
  
  int people_count = 0;
  histogram* first_hist;
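  // Re-identification scheme used below: the histogram of the nearest
  // detected person is accumulated into first_hist over the first 11
  // detections; it is then normalized once and used as the reference,
  // and each later detection is matched against it with histo_dist_sq()
  // (a match requires a distance below 1.3).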

  int max_people_num = (int)fs_->getFirstTopLevelNode()["max_people_num"];
  // Main loop:
  while (!viewer.wasStopped() && ros::ok() )
  {
    if ( current_state == 1 )
    {
      if ( cc_->ready_xyzrgb_ )    // if a new cloud is available
      {
    //    std::cout << "In state 1!!!!!!!!!!" << std::endl;

        std::vector<float> x;
        std::vector<float> y;
        std::vector<float> depth;

        cloud = cc_->msg_xyzrgb_;
        PointCloudT::Ptr cloud_new(new PointCloudT(*cloud));
        cc_->ready_xyzrgb_ = false;

        // Perform people detection on the new cloud:
        std::vector<pcl::people::PersonCluster<PointT> > clusters;   // vector containing person clusters
        people_detector.setInputCloud(cloud_new);
        people_detector.setGround(ground_coeffs);                    // set floor coefficients
        people_detector.compute(clusters);                           // perform people detection

        ground_coeffs = people_detector.getGround();                 // get updated floor coefficients

        // Draw cloud and people bounding boxes in the viewer:
        viewer.removeAllPointClouds();
        viewer.removeAllShapes();
        pcl::visualization::PointCloudColorHandlerRGBField<PointT> rgb(cloud);
        viewer.addPointCloud<PointT> (cloud, rgb, "input_cloud");
        unsigned int k = 0;
        std::vector<pcl::people::PersonCluster<PointT> >::iterator it;
        std::vector<pcl::people::PersonCluster<PointT> >::iterator it_min;

        float min_z = 10.0;

        float histo_dist_min = 2.0;
        int id = -1;

        for(it = clusters.begin(); it != clusters.end(); ++it)
        {
          if(it->getPersonConfidence() > min_confidence)             // draw only people with confidence above a threshold
          {

            x.push_back((it->getTCenter())[0]);
            y.push_back((it->getTCenter())[1]);
            depth.push_back(it->getDistance());
            // draw theoretical person bounding box in the PCL viewer:
            /*pcl::copyPointCloud( *cloud, it->getIndices(), *cloud_people);
            if ( people_count == 0 )
            {
              first_hist = calc_histogram_a( cloud_people );
              people_count++;
              it->drawTBoundingBox(viewer, k);
            }
            else if ( people_count <= 10 )
            {
              histogram* hist_tmp = calc_histogram_a( cloud_people );
              add_hist( first_hist, hist_tmp );
              it->drawTBoundingBox(viewer, k);
              people_count++;
            }*/
            pcl::copyPointCloud( *cloud, it->getIndices(), *cloud_people);
            if ( people_count < 11 )
            {
              if ( it->getDistance() < min_z )
              {
                it_min = it;
                min_z = it->getDistance();
              }
            }
            else if ( people_count == 11 )
            {
              normalize_histogram( first_hist );
              people_count++;
              histogram* hist_tmp = calc_histogram( cloud_people );
              float tmp = histo_dist_sq( first_hist, hist_tmp );
              std::cout << "The histogram distance is " << tmp << std::endl;
              histo_dist_min = tmp;
              it_min = it;
              id = k;
            }
            else
            {
              histogram* hist_tmp = calc_histogram( cloud_people );
              float tmp = histo_dist_sq( first_hist, hist_tmp );
              std::cout << "The histogram distance is " << tmp << std::endl;
              if ( tmp < histo_dist_min )
              {
                histo_dist_min = tmp;
                it_min = it;
                id = k;
              }
            }
            k++;
            //std::cout << "The data of the center is " << cloud->points [(cloud->width >> 1) * (cloud->height + 1)].x << "  " << cloud->points [(cloud->width >> 1) * (cloud->height + 1)].y << " " << cloud->points [(cloud->width >> 1) * (cloud->height + 1)].z << std::endl;
            //std::cout << "The size of the people cloud is " <<  cloud_people->points.size() << std::endl;
            std::cout << "The " << k << " person's distance is " << it->getDistance() << std::endl;
          }
        }
        pub_people_.x = x;
        pub_people_.y = y;
        pub_people_.depth = depth;
        if ( k > 0 )
        {
          if ( people_count <= 11 )
          {
            pcl::copyPointCloud( *cloud, it_min->getIndices(), *cloud_people);
            if ( people_count == 0)
            {
              first_hist = calc_histogram_a( cloud_people );
              people_count++;
              it_min->drawTBoundingBox(viewer, 1);
            }
            else if ( people_count < 11 )
            {
              histogram* hist_tmp = calc_histogram_a( cloud_people );
              add_hist( first_hist, hist_tmp );
              it_min->drawTBoundingBox(viewer, 1);
              people_count++;
            }
          }
          else
          {
            pub_people_.id = k-1;
            if ( histo_dist_min < 1.3 )
            {
              it_min->drawTBoundingBox(viewer, 1);
              std::cout << "The minimum distance of the histogram is " << histo_dist_min << std::endl;
              std::cout << "The vector is " << it_min->getTCenter() << std::endl << "while the elements are " << (it_min->getTCenter())[0] << " " << (it_min->getTCenter())[1] << std::endl;
            }
            else
            {
              pub_people_.id = -1;
            }
          }
        }
        else
        {
          pub_people_.id = -1;
        }
        pub_people_.header.stamp = ros::Time::now();
        people_pub.publish(pub_people_);
        
        std::cout << k << " people found" << std::endl;
        viewer.spinOnce();
        ros::spinOnce();

        // Display average framerate:
        if (++count == 30)
        {
          double now = pcl::getTime ();
          std::cout << "Average framerate: " << double(count)/double(now - last) << " Hz" <<  std::endl;
          count = 0;
          last = now;
        }
        //cloud_mutex.unlock ();
      }
    }
    else
    {
      viewer.spinOnce();
      ros::spinOnce();
      // std::cout << "In state 0!!!!!!!!!" << std::endl;
    }
  }

  return 0;
}
static GstFlowReturn
kms_pointer_detector_transform_frame_ip (GstVideoFilter * filter,
    GstVideoFrame * frame)
{
  KmsPointerDetector *pointerdetector = KMS_POINTER_DETECTOR (filter);
  GstMapInfo info;
  double min_Bhattacharyya = 1.0, bhattacharyya = 1, bhattacharyya2 =
      1, bhattacharyya3 = 1;
  int i = 0;

  pointerdetector->frameSize = cvSize (frame->info.width, frame->info.height);
  kms_pointer_detector_initialize_images (pointerdetector, frame);

  gst_buffer_map (frame->buffer, &info, GST_MAP_READ);
  pointerdetector->cvImage->imageData = (char *) info.data;

  if ((pointerdetector->iteration > FRAMES_TO_RESET)
      && (pointerdetector->state != CAPTURING_SECOND_HIST)) {
    get_histogram (pointerdetector->cvImage, pointerdetector->upCornerRect1,
        pointerdetector->trackinRectSize, pointerdetector->histSetUpRef);
    pointerdetector->histRefCapturesCounter = 0;
    pointerdetector->secondHistCapturesCounter = 0;
    pointerdetector->state = CAPTURING_REF_HIST;
    pointerdetector->colorRect1 = WHITE;
    pointerdetector->colorRect2 = WHITE;
    pointerdetector->iteration = 6;
  }
  if (pointerdetector->iteration == 5) {
    get_histogram (pointerdetector->cvImage, pointerdetector->upCornerRect1,
        pointerdetector->trackinRectSize, pointerdetector->histSetUpRef);
    pointerdetector->state = CAPTURING_REF_HIST;
    goto end;
  }

  if (pointerdetector->iteration < 6)
    goto end;

  get_histogram (pointerdetector->cvImage, pointerdetector->upCornerRect1,
      pointerdetector->trackinRectSize, pointerdetector->histSetUp1);
  bhattacharyya2 =
      cvCompareHist (pointerdetector->histSetUp1,
      pointerdetector->histSetUpRef, CV_COMP_BHATTACHARYYA);
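  /* CV_COMP_BHATTACHARYYA yields a distance in [0, 1]: 0 for identical
   * normalized histograms, 1 for histograms with no overlap at all. */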
  if ((bhattacharyya2 >= COMPARE_THRESH_SECOND_HIST)
      && (pointerdetector->state == CAPTURING_REF_HIST)) {
    pointerdetector->histRefCapturesCounter++;
    if (pointerdetector->histRefCapturesCounter > 20) {
      pointerdetector->histRefCapturesCounter = 0;
      pointerdetector->colorRect1 = CV_RGB (0, 255, 0);
      pointerdetector->state = CAPTURING_SECOND_HIST;
    }
  }
  if (pointerdetector->state == CAPTURING_SECOND_HIST) {
    get_histogram (pointerdetector->cvImage, pointerdetector->upCornerRect2,
        pointerdetector->trackinRectSize, pointerdetector->histSetUp2);
    bhattacharyya3 =
        cvCompareHist (pointerdetector->histSetUp1,
        pointerdetector->histSetUp2, CV_COMP_BHATTACHARYYA);
    if (bhattacharyya3 < COMPARE_THRESH_2_RECT) {
      pointerdetector->secondHistCapturesCounter++;
      if (pointerdetector->secondHistCapturesCounter > 15) {
        pointerdetector->secondHistCapturesCounter = 0;
        pointerdetector->state = BOTH_HIST_SIMILAR;
        pointerdetector->colorRect2 = CV_RGB (0, 255, 0);
        cvCopyHist (pointerdetector->histSetUp2, &pointerdetector->histModel);
        pointerdetector->upCornerFinalRect.x = 10;
        pointerdetector->upCornerFinalRect.y = 10;
        pointerdetector->histRefCapturesCounter = 0;
        pointerdetector->secondHistCapturesCounter = 0;
      }
    }
  }
  for (i = 0; i < pointerdetector->numOfRegions; i++) {
    int horizOffset =
        pointerdetector->upCornerFinalRect.x +
        pointerdetector->windowScale * (rand () %
        pointerdetector->trackinRectSize.width -
        pointerdetector->trackinRectSize.width / 2);
    int vertOffset =
        pointerdetector->upCornerFinalRect.y +
        pointerdetector->windowScale * (rand () %
        pointerdetector->trackinRectSize.height -
        pointerdetector->trackinRectSize.height / 2);
    pointerdetector->trackingPoint1Aux.x = horizOffset;
    pointerdetector->trackingPoint1Aux.y = vertOffset;
    pointerdetector->trackingPoint2Aux.x =
        horizOffset + pointerdetector->trackinRectSize.width;
    pointerdetector->trackingPoint2Aux.y =
        vertOffset + pointerdetector->trackinRectSize.height;
    if ((horizOffset > 0)
        && (pointerdetector->trackingPoint2Aux.x <
            pointerdetector->cvImage->width)
        && (vertOffset > 0)
        && (pointerdetector->trackingPoint2Aux.y <
            pointerdetector->cvImage->height)) {
      if (pointerdetector->show_debug_info)
        cvRectangle (pointerdetector->cvImage,
            pointerdetector->trackingPoint1Aux,
            pointerdetector->trackingPoint2Aux, CV_RGB (0, 255, 0), 1, 8, 0);
      cvSetImageROI (pointerdetector->cvImage,
          cvRect (pointerdetector->trackingPoint1Aux.x,
              pointerdetector->trackingPoint1Aux.y,
              pointerdetector->trackinRectSize.width,
              pointerdetector->trackinRectSize.height));
      cvCopy (pointerdetector->cvImage, pointerdetector->cvImageAux1, 0);
      cvResetImageROI (pointerdetector->cvImage);
      calc_histogram (pointerdetector->cvImageAux1,
          pointerdetector->histCompare);
      bhattacharyya =
          cvCompareHist (pointerdetector->histModel,
          pointerdetector->histCompare, CV_COMP_BHATTACHARYYA);
      if ((bhattacharyya < min_Bhattacharyya)
          && (bhattacharyya < COMPARE_THRESH_HIST_REF)) {
        min_Bhattacharyya = bhattacharyya;
        pointerdetector->trackingPoint1 = pointerdetector->trackingPoint1Aux;
        pointerdetector->trackingPoint2 = pointerdetector->trackingPoint2Aux;
      }
    }
  }
  cvRectangle (pointerdetector->cvImage, pointerdetector->upCornerRect1,
      pointerdetector->downCornerRect1, pointerdetector->colorRect1, 1, 8, 0);
  cvRectangle (pointerdetector->cvImage, pointerdetector->upCornerRect2,
      pointerdetector->downCornerRect2, pointerdetector->colorRect2, 1, 8, 0);
  if (min_Bhattacharyya < 0.95) {
    pointerdetector->windowScale = pointerdetector->windowScaleRef;
  } else {
    pointerdetector->windowScale = pointerdetector->cvImage->width / 8;
  }
  CvPoint finalPointerPositionAux;

  finalPointerPositionAux.x = pointerdetector->upCornerFinalRect.x +
      pointerdetector->trackinRectSize.width / 2;
  finalPointerPositionAux.y = pointerdetector->upCornerFinalRect.y +
      pointerdetector->trackinRectSize.height / 2;
  if (abs (pointerdetector->finalPointerPosition.x -
          finalPointerPositionAux.x) < 55 ||
      abs (pointerdetector->finalPointerPosition.y -
          finalPointerPositionAux.y) < 55) {
    finalPointerPositionAux.x =
        (finalPointerPositionAux.x +
        pointerdetector->finalPointerPosition.x) / 2;
    finalPointerPositionAux.y =
        (finalPointerPositionAux.y +
        pointerdetector->finalPointerPosition.y) / 2;
  }
  pointerdetector->upCornerFinalRect = pointerdetector->trackingPoint1;
  pointerdetector->downCornerFinalRect = pointerdetector->trackingPoint2;

  pointerdetector->finalPointerPosition.x = finalPointerPositionAux.x;
  pointerdetector->finalPointerPosition.y = finalPointerPositionAux.y;

  cvCircle (pointerdetector->cvImage, pointerdetector->finalPointerPosition,
      10.0, WHITE, -1, 8, 0);

  kms_pointer_detector_check_pointer_position (pointerdetector);

end:
  pointerdetector->iteration++;
  gst_buffer_unmap (frame->buffer, &info);
  return GST_FLOW_OK;
}