/* Estimate the relative pose between two cameras from their keypoint
 * matches using the 5-point algorithm inside a RANSAC loop */
bool BundlerApp::EstimateRelativePose2(int i1, int i2,
                                       camera_params_t &camera1,
                                       camera_params_t &camera2)
{
    MatchIndex list_idx;

    if (i1 < i2)
        list_idx = GetMatchIndex(i1, i2);
    else
        list_idx = GetMatchIndex(i2, i1);

    std::vector<KeypointMatch> &matches = m_matches.GetMatchList(list_idx);
    int num_matches = (int) matches.size();

    double K1[9], K2[9];
    GetIntrinsics(camera1, K1);
    GetIntrinsics(camera2, K2);

    double R0[9], t0[3];
    int num_inliers = 0;

    if (!m_optimize_for_fisheye) {
        num_inliers =
            EstimatePose5Point(m_image_data[i1].m_keys,
                               m_image_data[i2].m_keys,
                               matches,
                               512 /* RANSAC rounds */,
                               0.25 * m_fmatrix_threshold /* inlier threshold */,
                               K1, K2, R0, t0);
    } else {
        /* Undistort the keypoints before running the 5-point algorithm */
        std::vector<Keypoint> k1 = m_image_data[i1].UndistortKeysCopy();
        std::vector<Keypoint> k2 = m_image_data[i2].UndistortKeysCopy();

        num_inliers =
            EstimatePose5Point(k1, k2, matches,
                               1024 /* RANSAC rounds */,
                               0.25 * m_fmatrix_threshold /* inlier threshold */,
                               K1, K2, R0, t0);
    }

    if (num_inliers == 0)
        return false;

    printf(" Found %d / %d inliers (%0.3f%%)\n", num_inliers, num_matches,
           100.0 * num_inliers / num_matches);

    /* Store the estimated pose in the second camera: the rotation is copied
     * directly, and the camera center is recovered as c = -R^T * t */
    memcpy(camera2.R, R0, sizeof(double) * 9);

    matrix_transpose_product(3, 3, 3, 1, R0, t0, camera2.t);
    matrix_scale(3, 1, camera2.t, -1.0, camera2.t);

    return true;
}
/* Generate an initial position for a track by pushing a point far out
 * along the viewing ray of the first camera that observes it */
v3_t BundlerApp::GeneratePointAtInfinity(const ImageKeyVector &views,
                                         int *added_order,
                                         camera_params_t *cameras,
                                         double &error,
                                         bool explicit_camera_centers)
{
    int camera_idx = views[0].first;
    int image_idx = added_order[camera_idx];
    int key_idx = views[0].second;
    Keypoint &key = GetKey(image_idx, key_idx);

    camera_params_t *cam = cameras + camera_idx;

    double p3[3] = { key.m_x, key.m_y, 1.0 };

    if (m_optimize_for_fisheye) {
        /* Undistort the point */
        double x = p3[0], y = p3[1];
        m_image_data[image_idx].UndistortPoint(x, y, p3[0], p3[1]);
    }

    double K[9], Kinv[9];
    GetIntrinsics(cameras[camera_idx], K);
    matrix_invert(3, K, Kinv);

    /* Back-project the keypoint into a viewing ray in camera coordinates */
    double ray[3];
    matrix_product(3, 3, 3, 1, Kinv, p3, ray);

    /* We now have a ray; rotate it into world coordinates */
    double ray_world[3];
    matrix_transpose_product(3, 3, 3, 1, cam->R, ray, ray_world);

    double pos[3] = { 0.0, 0.0, 0.0 };
    double pt_inf[3] = { 0.0, 0.0, 0.0 };

    if (explicit_camera_centers) {
        memcpy(pos, cam->t, 3 * sizeof(double));

        /* Push the point far out along the ray (scaled by 100) */
        double ray_extend[3];
        matrix_scale(3, 1, ray, 100.0, ray_extend);
        matrix_sum(3, 1, 3, 1, pos, ray_extend, pt_inf);
    }
    /* Otherwise the point is left at the origin */

    return v3_new(pt_inf[0], pt_inf[1], pt_inf[2]);
}
/* Triangulate a subtrack */
v3_t BundlerApp::TriangulateNViews(const ImageKeyVector &views,
                                   int *added_order,
                                   camera_params_t *cameras,
                                   double &error,
                                   bool explicit_camera_centers)
{
    int num_views = (int) views.size();

    v2_t *pv = new v2_t[num_views];
    double *Rs = new double[9 * num_views];
    double *ts = new double[3 * num_views];

    for (int i = 0; i < num_views; i++) {
        int camera_idx = views[i].first;
        int image_idx = added_order[camera_idx];
        int key_idx = views[i].second;
        Keypoint &key = GetKey(image_idx, key_idx);

        double p3[3] = { key.m_x, key.m_y, 1.0 };

        if (m_optimize_for_fisheye) {
            /* Undistort the point */
            double x = p3[0], y = p3[1];
            m_image_data[image_idx].UndistortPoint(x, y, p3[0], p3[1]);
        }

        double K[9], Kinv[9];
        GetIntrinsics(cameras[camera_idx], K);
        matrix_invert(3, K, Kinv);

        double p_n[3];
        matrix_product(3, 3, 3, 1, Kinv, p3, p_n);

        /* Flip the sign of the normalized coordinates to account for
         * the -z viewing direction used by the projection code */
        pv[i] = v2_new(-p_n[0], -p_n[1]);
        pv[i] = UndistortNormalizedPoint(pv[i], cameras[camera_idx]);

        camera_params_t *cam = cameras + camera_idx;
        memcpy(Rs + 9 * i, cam->R, 9 * sizeof(double));

        if (!explicit_camera_centers) {
            memcpy(ts + 3 * i, cam->t, 3 * sizeof(double));
        } else {
            /* Convert the camera center to a translation: t = -R * c */
            matrix_product(3, 3, 3, 1, cam->R, cam->t, ts + 3 * i);
            matrix_scale(3, 1, ts + 3 * i, -1.0, ts + 3 * i);
        }
    }

    v3_t pt = triangulate_n(num_views, pv, Rs, ts, &error);

    /* Recompute the error as the RMS reprojection error in pixels */
    error = 0.0;
    for (int i = 0; i < num_views; i++) {
        int camera_idx = views[i].first;
        int image_idx = added_order[camera_idx];
        int key_idx = views[i].second;
        Keypoint &key = GetKey(image_idx, key_idx);

        v2_t pr = sfm_project_final(cameras + camera_idx, pt,
                                    explicit_camera_centers ? 1 : 0,
                                    m_estimate_distortion ? 1 : 0);

        if (m_optimize_for_fisheye) {
            double x = Vx(pr), y = Vy(pr);
            m_image_data[image_idx].DistortPoint(x, y, Vx(pr), Vy(pr));
        }

        double dx = Vx(pr) - key.m_x;
        double dy = Vy(pr) - key.m_y;

        error += dx * dx + dy * dy;
    }

    error = sqrt(error / num_views);

    delete [] pv;
    delete [] Rs;
    delete [] ts;

    return pt;
}
/* Triangulate a 3D point from two image observations */
v3_t Triangulate(v2_t p, v2_t q,
                 camera_params_t c1, camera_params_t c2,
                 double &proj_error, bool &in_front, double &angle,
                 bool explicit_camera_centers)
{
    double K1[9], K2[9];
    double K1inv[9], K2inv[9];

    GetIntrinsics(c1, K1);
    GetIntrinsics(c2, K2);

    matrix_invert(3, K1, K1inv);
    matrix_invert(3, K2, K2inv);

    /* Set up the homogeneous image points (the -1.0 accounts for the
     * -z viewing direction) and normalize them by the intrinsics */
    double proj1[3] = { Vx(p), Vy(p), -1.0 };
    double proj2[3] = { Vx(q), Vy(q), -1.0 };

    double proj1_norm[3], proj2_norm[3];

    matrix_product(3, 3, 3, 1, K1inv, proj1, proj1_norm);
    matrix_product(3, 3, 3, 1, K2inv, proj2, proj2_norm);

    v2_t p_norm = v2_new(proj1_norm[0] / proj1_norm[2],
                         proj1_norm[1] / proj1_norm[2]);

    v2_t q_norm = v2_new(proj2_norm[0] / proj2_norm[2],
                         proj2_norm[1] / proj2_norm[2]);

    /* Undo radial distortion */
    p_norm = UndistortNormalizedPoint(p_norm, c1);
    q_norm = UndistortNormalizedPoint(q_norm, c2);

    /* Compute the angle between the rays */
    angle = ComputeRayAngle(p, q, c1, c2);

    /* Triangulate the point */
    v3_t pt;
    if (!explicit_camera_centers) {
        pt = triangulate(p_norm, q_norm, c1.R, c1.t, c2.R, c2.t, &proj_error);
    } else {
        double t1[3];
        double t2[3];

        /* Put the translation in standard form: t = -R * c */
        matrix_product(3, 3, 3, 1, c1.R, c1.t, t1);
        matrix_scale(3, 1, t1, -1.0, t1);
        matrix_product(3, 3, 3, 1, c2.R, c2.t, t2);
        matrix_scale(3, 1, t2, -1.0, t2);

        pt = triangulate(p_norm, q_norm, c1.R, t1, c2.R, t2, &proj_error);
    }

    /* Convert the error from normalized coordinates to pixels */
    proj_error = (c1.f + c2.f) * 0.5 * sqrt(proj_error * 0.5);

    /* Check cheirality: the point must lie in front of both cameras */
    bool cc1 = CheckCheirality(pt, c1);
    bool cc2 = CheckCheirality(pt, c2);

    in_front = (cc1 && cc2);

    return pt;
}
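/* Illustrative usage sketch (not part of the original source): shows how
 * the two-view Triangulate() above might be called for a single pair of
 * matching pixel coordinates.  The camera_params_t inputs are assumed to
 * already hold calibrated poses (R, t, f, distortion) in the
 * explicit-camera-center convention; the ~2-degree angle cutoff below is
 * a hypothetical example value, not a threshold taken from this codebase. */
static v3_t TriangulateSingleMatchSketch(const camera_params_t &cam1,
                                         const camera_params_t &cam2,
                                         double x1, double y1,
                                         double x2, double y2,
                                         bool &ok)
{
    v2_t p = v2_new(x1, y1);   /* pixel coordinates in image 1 */
    v2_t q = v2_new(x2, y2);   /* pixel coordinates in image 2 */

    double proj_error = 0.0;   /* output: reprojection error in pixels */
    double angle = 0.0;        /* output: angle between the two rays */
    bool in_front = false;     /* output: cheirality check result */

    v3_t pt = Triangulate(p, q, cam1, cam2, proj_error, in_front, angle,
                          true /* explicit camera centers */);

    /* Reject points behind either camera or with a triangulation angle
     * too small to be numerically reliable (~2 degrees here) */
    ok = in_front && (angle > 0.035);

    return pt;
}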