Example #1
/// Testing the following member methods:
/// getDeltaX, getDeltaU, getZ
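/// Reference values are computed from the usual stereo relations (assuming
/// DISPUNITSCALE converts raw disparity units to pixels and CLX, CRX are the
/// left/right principal point x-coordinates):
///   z      = FX*TX / (d*DISPUNITSCALE - (CLX - CRX))
///   deltaX = deltaU*TX / (d*DISPUNITSCALE - (CLX - CRX))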
bool testGetDeltaXAndGetDeltaU() {
  CvStereoCamModel camModel;
  camModel.setCameraParams(FX, FY, TX, CLX, CRX, CY, DISPUNITSCALE);

  for (int i=1; i<100/DISPUNITSCALE; i++) {
    double disp = i;
    double du = 25.;
    double dx = camModel.getDeltaX(du, disp);
    double dx0 = du * TX/(disp*DISPUNITSCALE - (CLX - CRX));
    double diff = fabs(dx - dx0);
    if ( diff > .1e-10 ) {
      printf("getDeltaX() test failed, du=%f, raw disp=%f, dx=%e, dx0=%e, diff=%e\n",
          du, disp, dx, dx0, diff);
      return false;
    }
    double z  = camModel.getZ(disp);
    double z0 = FX*TX/(DISPUNITSCALE*disp - (CLX - CRX));
    diff = fabs(z-z0);
    if (diff > .1e-10) {
      printf("getZ() test failed, disp = %f, z=%e, z0=%e, diff=%e\n",
          disp, z, z0, diff);
      return false;
    }
    // and convert it back
    double du0 = camModel.getDeltaU(dx, z);
    diff = fabs(du-du0);
    if ( diff > .1e-10) {
      printf("getDeltaU() test failed, dx=%f, z=%f, du=%e, du0=%e, diff=%e\n",
          dx, z, du, du0, diff);
      return false;
    }
  }

  return true;
}
Example #2
/// Testing cartToDisp and dispToCart
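/// The test loads a set of Cartesian points, projects them to (u,v,d) with
/// cartToDisp(), back-projects them with dispToCart(), and checks that the
/// round trip reproduces the original points to within a small tolerance.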
bool testSparsePointConversion() {
  CvStereoCamModel camModel;
  camModel.setCameraParams(FX, FY, TX, CLX, CRX, CY, DISPUNITSCALE);
  char filename[]= "test/pointsInCartesian.xml";

  CvMat* points_cart  = (CvMat*)cvLoad(filename);
  if (points_cart == NULL ) {
    printf("Cannot load data file: %s\n", filename);
    return false;
  }
  CvMat* points_cart0 = cvCreateMat(points_cart->rows, points_cart->cols, CV_64FC1);
  CvMat* points_disp  = cvCreateMat(points_cart->rows, points_cart->cols, CV_64FC1);
  CvMat* points_diff  = cvCreateMat(points_cart->rows, points_cart->cols, CV_64FC1);

  camModel.cartToDisp(points_cart, points_disp);
  camModel.dispToCart(points_disp, points_cart0);

  cvAbsDiff(points_cart, points_cart0, points_diff);

  double min_val, max_val;

  cvMinMaxLoc(points_diff, &min_val, &max_val);

  // Release the matrices before the check so the early-return path does not leak them.
  cvReleaseMat(&points_cart);
  cvReleaseMat(&points_cart0);
  cvReleaseMat(&points_disp);
  cvReleaseMat(&points_diff);

  if (max_val > .1e-10) {
    printf("dispToCart() or cartToDisp() failed: max diff = %e\n", max_val);
    return false;
  }

  return true;
}
Example #3
/// Testing the following member methods:
/// getDeltaY, getDeltaV, getZ
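/// Reference values follow the same stereo relations as the X/U test; the
/// extra FX/FY factor accounts for possibly different focal lengths:
///   deltaY = deltaV*TX*FX / ((d*DISPUNITSCALE - (CLX - CRX))*FY)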
bool testGetDeltaYAndGetDeltaV() {
  bool status = true;
  CvStereoCamModel camModel;
  camModel.setCameraParams(FX, FY, TX, CLX, CRX, CY, DISPUNITSCALE);

  for (int i=1; i<100/DISPUNITSCALE; i++) {
    double disp = i;
    double dv = 25.;
    double dy = camModel.getDeltaY(dv, disp);
    double dy0 = dv * TX * FX /(disp*DISPUNITSCALE - (CLX - CRX))/FY;
    double diff = 0.;
    diff = fabs(dy-dy0);
    if ( diff > .1e-10 ) {
      printf("getDeltaV() test failed, dv=%f, raw disp=%f, dy=%e, dy0=%e\n",
          dv, disp, dy, dy0);
      return false;
    }
    double z = camModel.getZ(disp);
    double z0 = FX*TX/(DISPUNITSCALE*disp - (CLX - CRX));
    diff = fabs(z-z0);
    if (diff > .1e-10) {
      printf("getZ() test failed, disp = %f, z=%e, z0=%e, diff=%e\n",
          disp, z, z0, diff);
      return false;
    }
    // and convert it back
    double dv0 = camModel.getDeltaV(dy, z);
    diff = fabs(dv-dv0);
    if ( diff > .1e-10) {
      printf("getDeltaU() test failed, dx=%f, z=%f, du=%e, du0=%e, diff=%e\n",
          dy, z, dv, dv0, diff);
      return false;
    }
  }

  return true;
}
Example #4
  /// The image callback. For each new image, copy it, perform face detection, and draw the rectangles on the image.
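  /// It also converts each detected face to a 3D position via the stereo camera
  /// model and publishes it as a robot_msgs::PositionMeasurement.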
  void image_cb_all(ros::Time t)
  {

    detect_++;
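    // Only run the (relatively expensive) detector on every other callback.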
    if (detect_ % 2) {
      return;
    }

    cv_mutex_.lock();
 
    CvSize im_size;

    // Convert the images to OpenCV format
    if (lbridge_.fromImage(limage_,"bgr")) {
      cv_image_left_ = lbridge_.toIpl();
    }
    if (dbridge_.fromImage(dimage_)) {
      cv_image_disp_ = dbridge_.toIpl();
    }

    // Convert the stereo calibration into a camera model.
    if (cam_model_) {
      delete cam_model_;
    }
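    // The stereo parameters are read from the camera's 3x4 projection matrix P:
    // Fx = P[0], Clx = P[2], Fy = P[5], Cy = P[6]. For a rectified pair the right
    // principal point Crx is taken equal to Clx, and since P[3] = -Fx*Tx the
    // baseline follows as Tx = -P[3]/Fx.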
    double Fx = rcinfo_.P[0];
    double Fy = rcinfo_.P[5];
    double Clx = rcinfo_.P[2];
    double Crx = Clx;
    double Cy = rcinfo_.P[6];
    double Tx = -rcinfo_.P[3]/Fx;
    cam_model_ = new CvStereoCamModel(Fx,Fy,Tx,Clx,Crx,Cy,1.0/(double)stinfo_.dpp);
 
    if ( cv_image_left_ )  {
      im_size = cvGetSize(cv_image_left_);
      vector<CvRect> faces_vector = people_->detectAllFaces(cv_image_left_, haar_filename_, 1.0, cv_image_disp_, cam_model_, true);

      // Get the average disparity in the middle half of the bounding box, and compute the face center in 3d. 
      // Publish the face center as a track point.
      if (cv_image_disp_ && cam_model_) {
        int r, c, good_pix;
        ushort* ptr;
        double avg_disp;
        CvRect *one_face;
        robot_msgs::PositionMeasurement pos;
        CvMat *uvd = cvCreateMat(1,3,CV_32FC1);
        CvMat *xyz = cvCreateMat(1,3,CV_32FC1);
        for (uint iface = 0; iface < faces_vector.size(); iface++) {
          one_face = &faces_vector[iface];
          good_pix = 0;
          avg_disp = 0;
          // Sum the valid (non-zero) disparities in the middle half of the bounding box.
          for (r = floor(one_face->y+0.25*one_face->height); r < floor(one_face->y+0.75*one_face->height); r++) {
            ptr = (ushort*)(cv_image_disp_->imageData + r*cv_image_disp_->widthStep);
            for (c = floor(one_face->x+0.25*one_face->width); c < floor(one_face->x+0.75*one_face->width); c++) {
              if (ptr[c] > 0) {
                avg_disp += ptr[c];
                good_pix++;
              }
            }
          }
          if (good_pix == 0) {
            continue; // No valid disparity in this face region; skip it.
          }
          avg_disp /= (double)good_pix; // Take the average.
          cvmSet(uvd,0,0,one_face->x+one_face->width/2.0);
          cvmSet(uvd,0,1,one_face->y+one_face->height/2.0);
          cvmSet(uvd,0,2,avg_disp);
          cam_model_->dispToCart(uvd,xyz);

          bool do_publish = true;
          if (external_init_) {
            // Check if this person's face is close enough to one of the previously known faces and associate it.
            // If not, don't publish it.
          }

          if (do_publish) {
            pos.header.stamp = limage_.header.stamp;
            pos.name = "face_detection";
            pos.object_id = "-1";
            pos.pos.x = cvmGet(xyz,0,2);
            pos.pos.y = -1.0*cvmGet(xyz,0,0);
            pos.pos.z = -1.0*cvmGet(xyz,0,1);
            pos.header.frame_id = "stereo_link";
            pos.reliability = 0.8;
            pos.initialization = 0;
            //pos.covariance = ;
            publish("person_measurement",pos);
          }
        }

        cvReleaseMat(&uvd);
        cvReleaseMat(&xyz);

        if (do_display_) {
          if (!cv_image_disp_out_) {
            cv_image_disp_out_ = cvCreateImage(im_size,IPL_DEPTH_8U,1);
          }
          cvCvtScale(cv_image_disp_,cv_image_disp_out_,4.0/stinfo_.dpp);

          cvShowImage("Face detector: Face Detection",cv_image_left_);
          cvShowImage("Face detector: Disparity",cv_image_disp_out_);
        }
      }
      
    }
    cv_mutex_.unlock();
  }