// Processes one incoming frame: extracts keypoints/descriptors, bootstraps the
// first two views if necessary, and otherwise matches features against the
// current tracks, estimates the new camera pose, and (on keyframes) runs
// windowed bundle adjustment. Returns a non-ok Status on any pipeline failure.
Status KeyframeVisualOdometry::Update(const Image& image) {
  // Extract keypoints from the frame.
  std::vector<Keypoint> keypoints;
  Status status = GetKeypoints(image, &keypoints);
  if (!status.ok()) return status;

  // Extract features and descriptors from the keypoints.
  std::vector<Feature> features;
  std::vector<Descriptor> descriptors;
  status = GetDescriptors(image, &keypoints, &features, &descriptors);
  if (!status.ok()) return status;

  // Set the annotator's image so later Annotate*() calls draw on this frame.
  if (options_.draw_tracks || options_.draw_features) {
    annotator_.SetImage(image);
  }

  // Initialize the very first view if we don't have one yet. Until the map is
  // bootstrapped, landmarks only need two observations to triangulate.
  if (current_keyframe_ == kInvalidView) {
    Landmark::SetRequiredObservations(2);
    InitializeFirstView(features, descriptors);
    return Status::Ok();
  }

  // Initialize the second view if we don't have one yet using 2D<-->2D
  // matching. Only on success do we raise the observation requirement to the
  // configured value.
  if (view_indices_.size() == 1) {
    status = InitializeSecondView(features, descriptors);
    if (status.ok()) {
      Landmark::SetRequiredObservations(
          options_.num_observations_to_triangulate);
    }
    return status;
  }

  // Create a camera with an unknown pose.
  Camera camera;
  camera.SetIntrinsics(intrinsics_);
  View::Ptr new_view = View::Create(camera);

  // Is this new image going to be a keyframe? Either one was explicitly
  // requested, or tracking has decayed below the minimum track count.
  const bool is_keyframe =
      initialize_new_keyframe_ ||
      NumEstimatedTracks() < options_.min_num_feature_tracks;
  if (is_keyframe) {
    initialize_new_keyframe_ = false;
  }

  // Update feature tracks and add matched features to the view.
  status = UpdateFeatureTracks(features,
                               descriptors,
                               new_view->Index(),
                               is_keyframe);
  if (!status.ok()) {
    View::DeleteMostRecentView();
    return status;
  }

  // Compute the new camera pose. On failure, discard the freshly created view
  // and return immediately — mirroring the UpdateFeatureTracks failure path.
  // (Previously execution fell through here, so bundle adjustment and
  // annotation could run against the just-deleted view.)
  status = EstimatePose(new_view->Index());
  if (!status.ok()) {
    View::DeleteMostRecentView();
    return status;
  }
  view_indices_.push_back(new_view->Index());

  if (is_keyframe && options_.perform_bundle_adjustment) {
    // Bundle adjust views in the sliding window.
    BundleAdjuster bundle_adjuster;
    if (!bundle_adjuster.Solve(options_.bundle_adjustment_options,
                               SlidingWindowViewIndices()))
      return Status::Cancelled("Failed to perform bundle adjustment.");
  }

  // Annotate tracks and features.
  if (options_.draw_features) {
    annotator_.AnnotateFeatures(features);
  }
  if (options_.draw_tracks) {
    annotator_.AnnotateTracks(tracks_);
  }
  if (options_.draw_tracks || options_.draw_features) {
    annotator_.Draw();
  }

  return status;
}
// Ejemplo n.º 2 (extraction-artifact separator between two unrelated fragments)
// Finds the trained pose whose PCA coefficients best match the input patch.
//
// The input patch is projected into the low-dimensional PCA space defined by
// `avg` (mean) and `eigenvectors`, and compared by L2 norm against the stored
// per-pose coefficients in m_pca_coeffs. Outputs the index of the closest pose
// and its distance.
//
// patch        - input patch; may be an IplImage* or a CvMat* (distinguished
//                via CV_IS_MAT). If a CvMat, it is assumed to already hold
//                PCA coefficients of size 1 x m_pca_dim_low — TODO confirm.
// pose_idx     - out: index of the best-matching pose (-1 if m_pose_count == 0).
// distance     - out: L2 distance to that pose's coefficients (1e10 sentinel
//                if no pose was examined).
// avg          - PCA mean vector, or 0 to disable PCA entirely.
// eigenvectors - PCA basis used by ProjectPCASample.
void CvOneWayDescriptor::EstimatePosePCA(CvArr* patch, int& pose_idx, float& distance, CvMat* avg, CvMat* eigenvectors) const
{
	if(avg == 0)
	{
		// do not use pca
		if (!CV_IS_MAT(patch))
		{
			EstimatePose((IplImage*)patch, pose_idx, distance);
		}
		else
		{
			// NOTE(review): CvMat input without PCA is silently ignored —
			// pose_idx and distance are left untouched by this branch.
			// Presumably this combination never occurs in practice; verify
			// against callers.
		}
		return;
	}
	CvRect roi;
	if (!CV_IS_MAT(patch))
	{
		// Image input: make sure the ROI matches the expected patch size,
		// resizing into the scratch member m_input_patch when it does not.
		roi = cvGetImageROI((IplImage*)patch);
		if(roi.width != GetPatchSize().width || roi.height != GetPatchSize().height)
		{
			cvResize(patch, m_input_patch);
			patch = m_input_patch;
			roi = cvGetImageROI((IplImage*)patch);
		}
	}

	// Holds the input's coefficients in the m_pca_dim_low-dimensional space.
	CvMat* pca_coeffs = cvCreateMat(1, m_pca_dim_low, CV_32FC1);

	if (CV_IS_MAT(patch))
	{
		// CvMat input is taken as precomputed coefficients and copied as-is.
		cvCopy((CvMat*)patch, pca_coeffs);
	}
	else
	{
		// Image input: normalize the patch to unit sum, then project it into
		// the PCA space. NOTE(review): assumes cvSum(patch) is nonzero — an
		// all-black patch would divide by zero here; confirm upstream
		// guarantees.
		IplImage* patch_32f = cvCreateImage(cvSize(roi.width, roi.height), IPL_DEPTH_32F, 1);
		float sum = cvSum(patch).val[0];
		cvConvertScale(patch, patch_32f, 1.0f/sum);
		ProjectPCASample(patch_32f, avg, eigenvectors, pca_coeffs);
		cvReleaseImage(&patch_32f);
	}


	// Linear scan over all trained poses for the nearest-neighbor match.
	// 1e10 / -1 are the "nothing found" sentinels.
	distance = 1e10;
	pose_idx = -1;

	for(int i = 0; i < m_pose_count; i++)
	{
		float dist = cvNorm(m_pca_coeffs[i], pca_coeffs);
		// Retained alternative distance implementations (manual element-wise
		// and 4-way unrolled sums), kept disabled in favor of cvNorm above.
		//		float dist = 0;
		//		float data1, data2;
		//		//CvMat* pose_pca_coeffs = m_pca_coeffs[i];
		//		for (int x=0; x < pca_coeffs->width; x++)
		//			for (int y =0 ; y < pca_coeffs->height; y++)
		//			{
		//				data1 = ((float*)(pca_coeffs->data.ptr + pca_coeffs->step*x))[y];
		//				data2 = ((float*)(m_pca_coeffs[i]->data.ptr + m_pca_coeffs[i]->step*x))[y];
		//				dist+=(data1-data2)*(data1-data2);
		//			}
		////#if 1
		//		for (int j = 0; j < m_pca_dim_low; j++)
		//		{
		//			dist += (pose_pca_coeffs->data.fl[j]- pca_coeffs->data.fl[j])*(pose_pca_coeffs->data.fl[j]- pca_coeffs->data.fl[j]);
		//		}
		//#else
		//		for (int j = 0; j <= m_pca_dim_low - 4; j += 4)
		//		{
		//			dist += (pose_pca_coeffs->data.fl[j]- pca_coeffs->data.fl[j])*
		//				(pose_pca_coeffs->data.fl[j]- pca_coeffs->data.fl[j]);
		//			dist += (pose_pca_coeffs->data.fl[j+1]- pca_coeffs->data.fl[j+1])*
		//				(pose_pca_coeffs->data.fl[j+1]- pca_coeffs->data.fl[j+1]);
		//			dist += (pose_pca_coeffs->data.fl[j+2]- pca_coeffs->data.fl[j+2])*
		//				(pose_pca_coeffs->data.fl[j+2]- pca_coeffs->data.fl[j+2]);
		//			dist += (pose_pca_coeffs->data.fl[j+3]- pca_coeffs->data.fl[j+3])*
		//				(pose_pca_coeffs->data.fl[j+3]- pca_coeffs->data.fl[j+3]);
		//		}
		//#endif
		if(dist < distance)
		{
			distance = dist;
			pose_idx = i;
		}
	}

	cvReleaseMat(&pca_coeffs);
}