Example #1
struct camera* db_select_cameras(int *ncams) {
  int i;
  int n_cameras;
  struct camera* cameras;
  PGresult *result = PQexec(conn, "SELECT id, url, code, analize_frames, threshold, motion_delay FROM cameras ORDER BY id;");
  if(PQresultStatus(result) != PGRES_TUPLES_OK) {
    fprintf(stderr, "No cameras found: %s\n", PQresultErrorMessage(result));
    PQclear(result);
    *ncams = 0;
    return NULL;
  }

  n_cameras = PQntuples(result);
  cameras = (struct camera*)calloc(n_cameras, sizeof(struct camera));

  for(i=0; i < n_cameras; i++) {
    char *id = PQgetvalue(result, i, 0);
    char *url = PQgetvalue(result, i, 1);
    char *name = PQgetvalue(result, i, 2);
    char *analize_frames = PQgetvalue(result, i, 3);
    char *threshold = PQgetvalue(result, i, 4);
    char *motion_delay = PQgetvalue(result, i, 5);

    cameras[i].url = (char*)malloc(strlen(url)+1);
    cameras[i].name = (char*)malloc(strlen(name)+1);
    strcpy(cameras[i].url, url);
    strcpy(cameras[i].name, name);
    sscanf(id, "%d", &cameras[i].id);
    sscanf(analize_frames, "%d", &cameras[i].analize_frames);
    sscanf(threshold, "%d", &cameras[i].threshold);
    sscanf(motion_delay, "%d", &cameras[i].motion_delay);

    cameras[i].last_screenshot = 0;
    cameras[i].active = 1;
    cameras[i].cam_consumers_list = NULL;

    if(pthread_mutex_init(&cameras[i].consumers_lock, NULL) != 0) {
      fprintf(stderr, "pthread_mutex_init failed\n");
      exit(EXIT_FAILURE);
    }

    print_camera(&cameras[i]);
  }
  PQclear(result);

  *ncams = n_cameras;
  return cameras;
}
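Not part of the original example: a minimal sketch of how a caller might release everything db_select_cameras allocates. free_cameras is a hypothetical helper added for illustration; the field names simply mirror the struct usage above.

static void free_cameras(struct camera *cameras, int ncams) {
  int i;
  for(i = 0; i < ncams; i++) {
    free(cameras[i].url);
    free(cameras[i].name);
    pthread_mutex_destroy(&cameras[i].consumers_lock);
  }
  free(cameras);
}

/* possible call site:
  int ncams = 0;
  struct camera *cameras = db_select_cameras(&ncams);
  if(cameras == NULL || ncams == 0) {
    fprintf(stderr, "no cameras configured\n");
  } else {
    ... start a worker per camera, then eventually ...
    free_cameras(cameras, ncams);
  }
*/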
Example #2
int main(int argc, char** argv)
{

  google::InitGoogleLogging(argv[0]);
  if (argc != 5)
  {
    std::cerr << "usage: monoExCal <3Dpoints_file> <observation_file> <intrinsic_file> <extrinsic_file>\n";
    return 1;
  }
  int num_points;
  int num_observations;
  observation o;

  // This code performs extrinsic calibration on a monocular camera.
  // It assumes that 3D data from a positioning device is available;
  // this 3D data could come from an iGPS, a FARO arm, or any robot.
  // Each 3D point should be observed by the camera, and the image (x,y) position
  // of that observation must be known.
  // It is assumed that the intrinsic calibration is already known.
  // The input is provided by 4 files:
  // 1. 3D points stored as ascii in the form:
  //   num_points      # read as integer
  //   x[0] y[0] z[0]  # read as double
  //   ...
  //   x[num_points-1] y[num_points-1] z[num_points-1]
  // 2. Observations stored as ascii in the form
  //   num_observations  # read as integer
  //   x[0] y[0]         # read as double
  //   ...
  //   x[num_observations-1] y[num_observations-1] 
  // 3. Camera intrinsic data stored as ascii in the ROS ini format
  // 4. Camera initial extrinsic file stored as ascii indicating the homogeneous transform
  //  nx ox ax tx  #read all as double
  //  ny oy ay ty
  //  nz oz az tz
  //  0  0  0  1.0
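  //
  // Purely for illustration (these numbers are made up, not taken from any real
  // data set), files 1, 2 and 4 might look like:
  //
  //   points file        observations file   extrinsic file
  //   2                  2                   1.0  0.0  0.0  0.10
  //   0.10 0.20 1.50     321.4 240.7         0.0  1.0  0.0  0.05
  //   0.15 0.25 1.55     354.2 251.3         0.0  0.0  1.0  1.20
  //                                          0.0  0.0  0.0  1.0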

  // read in the problem
  FILE *points_fp = fopen(argv[1], "r");
  FILE *observations_fp = fopen(argv[2], "r");
  FILE *intrinsics_fp = fopen(argv[3], "r");
  FILE *extrinsics_fp = fopen(argv[4], "r");
  if (points_fp == NULL)
  {
    printf("Could not open file: %s", argv[1]);
    exit(1);
  }
  if (observations_fp == NULL)
  {
    printf("Could not open file: %s", argv[2]);
    exit(1);
  }
  if (intrinsics_fp == NULL)
  {
    printf("Could not open file: %s", argv[3]);
    exit(1);
  }
  if (extrinsics_fp == NULL)
  {
    printf("Could not open file: %s", argv[4]);
    exit(1);
  }

  // first read points file
  if (fscanf(points_fp, "%d", &num_points) != 1)
  {
    printf("couldn't read num_points\n");
    exit(1);
  }
  std::vector<point> Pts;
  for (int i = 0; i < num_points; i++)
  {
    point p;
    if (fscanf(points_fp, "%lf %lf %lf", &p.x, &p.y, &p.z) != 3)
    {
      printf("couldn't read point %d from %s\n", i, argv[1]);
      exit(1);
    }
    Pts.push_back(p);
  }
  fclose(points_fp);

  // Then read in the observations
  if (fscanf(observations_fp, "%d", &num_observations) != 1)
  {
    printf("couldn't read num_observations\n");
    exit(1);
  }
  if (num_observations != num_points)
  {
    printf("WARNING, num_points NOT EQUAL to num_observations\n");
  }
  std::vector<observation> Ob;
  for (int i = 0; i < num_observations; i++)
  {
    if (fscanf(observations_fp, "%lf %lf", &o.x, &o.y) != 2)
    {
      printf("couldn't read observation %d from %s\n", i, argv[2]);
      exit(1);
    }
    o.p_id = i;
    Ob.push_back(o);
  }
  fclose(observations_fp);

  // read camera intrinsics
  Camera C;
  char dum[255];
  double Dum, Dum2, Dum3;
  int image_width;
  int image_height;
  int rtn = 0;
  rtn += fscanf(intrinsics_fp, "%s", dum); // should be "#"
  rtn += fscanf(intrinsics_fp, "%s", dum); // should be "Camera"
  rtn += fscanf(intrinsics_fp, "%s", dum); // should be "intrinsics"
  rtn += fscanf(intrinsics_fp, "%s", dum); // should be "[image]"
  //  printf("should be [image]: %s\n",dum);
  rtn += fscanf(intrinsics_fp, "%s", dum); // should be "width"
  //  printf("should be width: %s\n",dum);
  rtn += fscanf(intrinsics_fp, "%d", &image_width); // should be the image width of provided by camera
  printf("image_width: %d\n", image_width);
  rtn += fscanf(intrinsics_fp, "%s", dum); // should be "height"
  rtn += fscanf(intrinsics_fp, "%d", &image_height); // should be the image width of provided by camera
  printf("height: %d\n", image_height);
  rtn += fscanf(intrinsics_fp, "%s", dum); // should be "[some name]"
  //  printf("[some name]: %s\n",dum);
  rtn += fscanf(intrinsics_fp, "%s", dum); // should be "camera"
  //  printf("should be camera: %s\n",dum);
  rtn += fscanf(intrinsics_fp, "%s", dum); // should be "matrix"
  //  printf("should be matrix: %s\n",dum);
  rtn += fscanf(intrinsics_fp, "%lf %lf %lf", &C.fx, &Dum, &C.cx);
  rtn += fscanf(intrinsics_fp, "%lf %lf %lf", &Dum, &C.fy, &C.cy);
  rtn += fscanf(intrinsics_fp, "%lf %lf %lf", &Dum, &Dum2, &Dum3);
  rtn += fscanf(intrinsics_fp, "%s", dum); // should be "distortion" 
  printf("camera matrix:\n");
  printf("%8.3lf %8.3lf %8.3lf\n", C.fx, 0.0, C.cx);
  printf("%8.3lf %8.3lf %8.3lf\n", 0.0, C.fy, C.cy);
  printf("%8.3lf %8.3lf %8.3lf\n", 0.0, 0.0, 0.0);
  //  printf("should be distortion: %s\n",dum);
  rtn += fscanf(intrinsics_fp, "%lf %lf %lf %lf %lf", &C.k1, &C.k2, &C.k3, &C.p1, &C.p2);
  printf("Distortion: [ %8.3lf %8.3lf %8.3lf %8.3lf %8.3lf ]\n", C.k1, C.k2, C.k3, C.p1, C.p2);
  fclose(intrinsics_fp);
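  // For reference, a fragment laid out the way the token-by-token parse above
  // expects (the numbers are placeholders and the bracketed camera name can be
  // anything; only the fields read above are shown):
  //
  //   # Camera intrinsics
  //   [image]
  //   width
  //   640
  //   height
  //   480
  //   [camera_name]
  //   camera matrix
  //   430.2   0.0 320.5
  //     0.0 430.2 240.5
  //     0.0   0.0   1.0
  //   distortion
  //   -0.33 0.11 0.0 0.0 0.0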

  // read camera extrinsics
  double H[4][4];
  rtn = 0;
  rtn += fscanf(extrinsics_fp, "%lf %lf %lf %lf", &H[0][0], &H[0][1], &H[0][2], &H[0][3]);
  rtn += fscanf(extrinsics_fp, "%lf %lf %lf %lf", &H[1][0], &H[1][1], &H[1][2], &H[1][3]);
  rtn += fscanf(extrinsics_fp, "%lf %lf %lf %lf", &H[2][0], &H[2][1], &H[2][2], &H[2][3]);
  rtn += fscanf(extrinsics_fp, "%lf %lf %lf %lf", &H[3][0], &H[3][1], &H[3][2], &H[3][3]);
  if (rtn != 16)
  {
    printf("could not read extrinsics rtn=%d from %s\n", rtn, argv[4]);
    exit(1);
  }
  fclose(extrinsics_fp);

  // Use the inverse of the camera-to-world transform as the camera transform.
  // For a homogeneous transform H = [R t; 0 1] the inverse is
  // H^-1 = [R^T  -R^T*t; 0 1], which is what is assembled below.
  double HI[9]; // rotation part of H^-1; note ceres uses column major order
  HI[0] = H[0][0]; // first row of H becomes the first column of HI (transpose of R)
  HI[1] = H[0][1];
  HI[2] = H[0][2];

  HI[3] = H[1][0]; // second row of H becomes the second column of HI
  HI[4] = H[1][1];
  HI[5] = H[1][2];

  HI[6] = H[2][0]; // third row of H becomes the third column of HI
  HI[7] = H[2][1];
  HI[8] = H[2][2];

  // translation part of H^-1: -R^T * t
  C.pos[0] = -(H[0][3] * H[0][0] + H[1][3] * H[1][0] + H[2][3] * H[2][0]);
  C.pos[1] = -(H[0][3] * H[0][1] + H[1][3] * H[1][1] + H[2][3] * H[2][1]);
  C.pos[2] = -(H[0][3] * H[0][2] + H[1][3] * H[1][2] + H[2][3] * H[2][2]);
  printf("C.xyz = %lf %lf %lf\n", C.pos[0], C.pos[1], C.pos[2]);
  ceres::RotationMatrixToAngleAxis(HI, C.aa);
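
  /* Optional sanity check (illustrative only, not in the original flow): with
     HI = R^T and C.pos = -R^T*t as built above, rotating C.pos back by R and
     adding t should give roughly zero in every component.
   for(int r = 0; r < 3; r++){
     double chk = H[r][0]*C.pos[0] + H[r][1]*C.pos[1] + H[r][2]*C.pos[2] + H[r][3];
     printf("inverse check row %d (expect ~0): %lf\n", r, chk);
   }
   */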

  /* used to create sample data with known solution
   FILE *fp6 = fopen("new_observations.txt","w");
   fprintf(fp6,"%d\n",num_points);
   for(int i=0;i<num_points;i++){
   o = project_point(C,Pts[i]);
   fprintf(fp6,"%lf %lf\n",o.x,o.y);
   }
   fclose(fp6); 
   exit(1);
   */
#define MONO_EXCAL_DEBUG
#ifdef MONO_EXCAL_DEBUG
  /* Print initial errors */
  /* Save projections and observations to matlab compatible form    */
  FILE *fp_temp1 = fopen("Obs.m", "w");
  FILE *fp_temp2 = fopen("Rep.m", "w");
  FILE *fp_temp3 = fopen("FRep.m", "w");
  fprintf(fp_temp1, "O = [ ");
  fprintf(fp_temp2, "R = [ ");
  fprintf(fp_temp3, "F = [ ");
  for (int i = 0; i < num_points; i++)
  {
    o = project_point(C, Pts[i]);
    printf("Errors %d  = %lf %lf\n", i, Ob[i].x - o.x, Ob[i].y - o.y);
    fprintf(fp_temp1, "%lf %lf;\n", Ob[i].x, Ob[i].y);
    fprintf(fp_temp2, "%lf %lf;\n", o.x, o.y);
  }
  fprintf(fp_temp1, "];\n");
  fprintf(fp_temp2, "];\n");
  fclose(fp_temp1);
  fclose(fp_temp2);
#endif  // MONO_EXCAL_DEBUG
  print_camera(C, "Original Parameters");
  // Create residuals for each observation in the bundle adjustment problem. The
  // parameters for cameras and points are added automatically.
  ceres::Problem problem;
  for (int i = 0; i < num_observations; ++i)
  {
    // Each Residual block takes a point and a camera as input and outputs a 2
    // dimensional residual. Internally, the cost function stores the observed
    // image location and compares the reprojection against the observation.
    ceres::CostFunction* cost_function = Camera_reprj_error::Create(Ob[i].x, Ob[i].y);

    problem.AddResidualBlock(cost_function, NULL, C.PB_extrinsics, C.PB_intrinsics, Pts[i].PB);
    problem.SetParameterBlockConstant(C.PB_intrinsics);
    problem.SetParameterBlockConstant(Pts[i].PB);
    /* DEBUG the reprojection error, this shows how to call reprojection error directly
     Camera_reprj_error CE(Ob[i].x,Ob[i].y);
     double res[2];
     CE(C.PB_extrinsics,C.PB_intrinsics,Pts[i].PB,res);
     printf("residual %d = %9.3lf %9.3lf\n",i,res[0],res[1]);
     */
  }

  // Make Ceres automatically detect the bundle structure. Note that the
  // standard solver, SPARSE_NORMAL_CHOLESKY, also works fine but it is slower
  // for standard bundle adjustment problems.
  ceres::Solver::Options options;
  options.linear_solver_type = ceres::DENSE_SCHUR;
  options.minimizer_progress_to_stdout = true;
  options.max_num_iterations = 1000;

  ceres::Solver::Summary summary;
  ceres::Solve(options, &problem, &summary);
  std::cout << summary.FullReport() << "\n";

#ifdef MONO_EXCAL_DEBUG
  /* Print final errors */
  for (int i = 0; i < num_points; i++)
  {
    o = project_point(C, Pts[i]);
    printf("%d : Ob= %6.3lf %6.3lf ", i, Ob[i].x, Ob[i].y);
    printf("%d : o= %6.3lf %6.3lf Errors = %10.3lf %10.3lf\n", i, o.x, o.y, Ob[i].x - o.x, Ob[i].y - o.y);
    fprintf(fp_temp3, "%lf %lf;\n", o.x, o.y);
  }
  fprintf(fp_temp3, "];\n");
  fclose(fp_temp3);
#endif  // MONO_EXCAL_DEBUG
  // Print final camera parameters 
  print_camera(C, "final parameters");

  // write new extrinsics to a file
  std::string temp(argv[4]);
  std::string new_ex_file = "new_" + temp;
  extrinsics_fp = fopen(new_ex_file.c_str(), "w");
  if (extrinsics_fp == NULL)
  {
    printf("Could not open file: %s\n", new_ex_file.c_str());
    exit(1);
  }
  ceres::AngleAxisToRotationMatrix(C.aa, HI); // Column Major 

  // invert HI (and C.pos) to recover the camera-to-world transform H
  H[0][0] = HI[0]; // first column of HI becomes the first row of H (transpose back)
  H[0][1] = HI[1];
  H[0][2] = HI[2];

  H[1][0] = HI[3]; // second column of HI becomes the second row of H
  H[1][1] = HI[4];
  H[1][2] = HI[5];

  H[2][0] = HI[6]; // third column of HI becomes the third row of H
  H[2][1] = HI[7];
  H[2][2] = HI[8];

  // translation of H: -R * C.pos
  H[0][3] = -(C.pos[0] * HI[0] + C.pos[1] * HI[1] + C.pos[2] * HI[2]);
  H[1][3] = -(C.pos[0] * HI[3] + C.pos[1] * HI[4] + C.pos[2] * HI[5]);
  H[2][3] = -(C.pos[0] * HI[6] + C.pos[1] * HI[7] + C.pos[2] * HI[8]);

  fprintf(extrinsics_fp, "%9.3lf %9.3lf %9.3lf %9.3lf\n", H[0][0], H[0][1], H[0][2], H[0][3]);
  fprintf(extrinsics_fp, "%9.3lf %9.3lf %9.3lf %9.3lf\n", H[1][0], H[1][1], H[1][2], H[1][3]);
  fprintf(extrinsics_fp, "%9.3lf %9.3lf %9.3lf %9.3lf\n", H[2][0], H[2][1], H[2][2], H[2][3]);
  fprintf(extrinsics_fp, "%9.3lf %9.3lf %9.3lf %9.3lf\n", 0.0, 0.0, 0.0, 1.0);
  fclose(extrinsics_fp);

  return 0;
}
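The definitions of Camera, point, observation, project_point, print_camera, and Camera_reprj_error are not included in this example. Purely as an illustrative sketch of the kind of autodiff cost functor the AddResidualBlock call above expects, something along the following lines would fit the way it is used; the 2/6/9/3 block sizes, the extrinsic layout of angle-axis followed by position, and the intrinsic ordering fx, fy, cx, cy, k1, k2, k3, p1, p2 are assumptions inferred from this file, and lens distortion is left out.

#include <ceres/ceres.h>
#include <ceres/rotation.h>

// Hypothetical reprojection functor; NOT the package's actual Camera_reprj_error.
struct CameraReprjErrorSketch
{
  CameraReprjErrorSketch(double ox, double oy) : ox_(ox), oy_(oy) {}

  template <typename T>
  bool operator()(const T* const extrinsics,  // [0..2] angle-axis, [3..5] position (assumed layout)
                  const T* const intrinsics,  // fx, fy, cx, cy, k1, k2, k3, p1, p2 (assumed layout)
                  const T* const point,       // world x, y, z
                  T* residual) const
  {
    T p[3];
    ceres::AngleAxisRotatePoint(extrinsics, point, p);  // rotate into the camera frame
    p[0] += extrinsics[3];
    p[1] += extrinsics[4];
    p[2] += extrinsics[5];
    T xp = p[0] / p[2];  // perspective divide
    T yp = p[1] / p[2];
    // Distortion (k1, k2, k3, p1, p2) would normally be applied here; omitted in this sketch.
    residual[0] = intrinsics[0] * xp + intrinsics[2] - T(ox_);
    residual[1] = intrinsics[1] * yp + intrinsics[3] - T(oy_);
    return true;
  }

  static ceres::CostFunction* Create(double ox, double oy)
  {
    // 2 residuals; 6 extrinsic, 9 intrinsic, 3 point parameters (assumed sizes)
    return new ceres::AutoDiffCostFunction<CameraReprjErrorSketch, 2, 6, 9, 3>(
        new CameraReprjErrorSketch(ox, oy));
  }

  double ox_, oy_;
};

Because the residual loop above holds the intrinsic block and every point block constant, the solve reduces to a six-parameter extrinsic-only fit, which is exactly what an extrinsic calibration should adjust.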