Пример #1
0
/* ************************************************************************* */
// Round-trips a GaussianFactorGraph that mixes all three factor kinds
// (GaussianConditional, JacobianFactor, HessianFactor) through the object,
// XML, and binary serialization paths.
TEST (Serialization, gaussian_factor_graph) {
  GaussianFactorGraph graph;
  {
    // A conditional P(x0 | x1, x2) with two parents.
    Matrix A1 = (Matrix(2, 2) << 1., 2., 3., 4.);
    Matrix A2 = (Matrix(2, 2) << 6., 0.2, 8., 0.4);
    Matrix R = (Matrix(2, 2) << 0.1, 0.3, 0.0, 0.34);
    Vector d(2); d << 0.2, 0.5;
    GaussianConditional cg(0, d, R, 1, A1, 2, A2);
    graph.push_back(cg);
  }

  {
    // A binary Jacobian factor plus its Hessian-form equivalent.
    Key i1 = 4, i2 = 7;
    Matrix A1 = eye(3), A2 = -1.0 * eye(3);
    Vector b = ones(3);
    SharedDiagonal model = noiseModel::Diagonal::Sigmas((Vector(3) << 1.0, 2.0, 3.0));
    JacobianFactor jacobianfactor(i1, A1, i2, A2, b, model);
    HessianFactor hessianfactor(jacobianfactor);
    graph.push_back(jacobianfactor);
    graph.push_back(hessianfactor);
  }
  EXPECT(equalsObj(graph));
  EXPECT(equalsXML(graph));
  EXPECT(equalsBinary(graph));
}
Пример #2
0
/* ************************************************************************* */
// Checks multiplyHessianAdd on a graph containing a Hessian factor:
// first against a brute-force dense A'A*x product, then verifies the "Add"
// semantics -- a second identical call accumulates into the output.
TEST(GaussianFactorGraph, multiplyHessianAdd2) {
  GaussianFactorGraph gfg = createGaussianFactorGraphWithHessianFactor();

  // brute force
  Matrix AtA;
  Vector eta;
  boost::tie(AtA, eta) = gfg.hessian();
  Vector X(6);
  X << 1, 2, 3, 4, 5, 6;
  Vector Y(6);
  Y << -450, -450, 300, 400, 2950, 3450;
  EXPECT(assert_equal(Y, AtA * X));

  VectorValues x = map_list_of<Key, Vector>(0, Vector2(1, 2))(1, Vector2(3, 4))(2, Vector2(5, 6));

  // Same product expressed per-variable.
  VectorValues expected;
  expected.insert(0, Vector2(-450, -450));
  expected.insert(1, Vector2(300, 400));
  expected.insert(2, Vector2(2950, 3450));

  VectorValues actual;
  gfg.multiplyHessianAdd(1.0, x, actual);
  EXPECT(assert_equal(expected, actual));

  // now, do it with non-zero y: output is accumulated, so the same call
  // again must double the result
  gfg.multiplyHessianAdd(1.0, x, actual);
  EXPECT(assert_equal(2 * expected, actual));
}
Пример #3
0
/* ************************************************************************* */
// Feeds a 7-node smoother into GaussianISAM one factor at a time and checks
// that the incremental Bayes tree matches a batch multifrontal elimination,
// both in information content (augmented Hessian) and in the solution.
TEST( ISAM, iSAM_smoother )
{
  Ordering ordering;
  for (int t = 1; t <= 7; t++) ordering += X(t);

  // Create smoother with 7 nodes
  GaussianFactorGraph smoother = createSmoother(7);

  // run iSAM for every factor
  GaussianISAM actual;
  for(boost::shared_ptr<GaussianFactor> factor: smoother) {
    GaussianFactorGraph factorGraph;
    factorGraph.push_back(factor);
    actual.update(factorGraph);
  }

  // Create expected Bayes Tree by solving smoother with "natural" ordering
  GaussianBayesTree expected = *smoother.eliminateMultifrontal(ordering);

  // Verify sigmas in the bayes tree: eliminated conditionals should carry
  // no noise model (they are already whitened)
  for(const GaussianBayesTree::sharedClique& clique: expected.nodes() | br::map_values) {
    GaussianConditional::shared_ptr conditional = clique->conditional();
    EXPECT(!conditional->get_model());
  }

  // Check whether BayesTree is correct
  EXPECT(assert_equal(GaussianFactorGraph(expected).augmentedHessian(), GaussianFactorGraph(actual).augmentedHessian()));

  // obtain solution
  VectorValues e; // expected solution
  for (int t = 1; t <= 7; t++) e.insert(X(t), Vector::Zero(2));
  VectorValues optimized = actual.optimize(); // actual solution
  EXPECT(assert_equal(e, optimized));
}
/* ************************************************************************* */
// Exercises clique shortcut conditionals P(C|Root) on a balanced smoother
// Bayes tree: the shortcut from the root to itself and from a child of the
// root should both be empty Bayes nets.
TEST( GaussianBayesTree, balanced_smoother_shortcuts )
{
  // Create smoother with 7 nodes
  GaussianFactorGraph smoother = createSmoother(7);

  // Create the Bayes tree
  Ordering ordering;
  ordering += X(1),X(3),X(5),X(7),X(2),X(6),X(4);
  GaussianBayesTree bayesTree = *smoother.eliminateMultifrontal(ordering);

  // Check the conditional P(Root|Root)
  GaussianBayesNet empty;
  GaussianBayesTree::sharedClique R = bayesTree.roots().front();
  GaussianBayesNet actual1 = R->shortcut(R);
  EXPECT(assert_equal(empty,actual1,tol));

  // Check the conditional P(C2|Root)
  GaussianBayesTree::sharedClique C2 = bayesTree[X(3)];
  GaussianBayesNet actual2 = C2->shortcut(R);
  EXPECT(assert_equal(empty,actual2,tol));

  // Check the conditional P(C3|Root), which should be equal to P(x2|x4)
  /** TODO: Note for multifrontal conditional:
   * p_x2_x4 is now an element conditional of the multifrontal conditional bayesTree[ordering[X(2)]]->conditional()
   * We don't know yet how to take it out.
   */
//  GaussianConditional::shared_ptr p_x2_x4 = bayesTree[ordering[X(2)]]->conditional();
//  p_x2_x4->print("Conditional p_x2_x4: ");
//  GaussianBayesNet expected3(p_x2_x4);
//  GaussianISAM::sharedClique C3 = isamTree[ordering[X(1)]];
//  GaussianBayesNet actual3 = GaussianISAM::shortcut(C3,R);
//  EXPECT(assert_equal(expected3,actual3,tol));
}
/* ************************************************************************* */
// Benchmark driver: linearizes an InertialNavFactor_GlobalVelocity 100000
// times at a fixed linearization point and reports timing via gttic/gttoc.
int main() {
  gtsam::Key PoseKey1(11);
  gtsam::Key PoseKey2(12);
  gtsam::Key VelKey1(21);
  gtsam::Key VelKey2(22);
  gtsam::Key BiasKey1(31);

  double measurement_dt(0.1);
  Vector world_g(Vector3(0.0, 0.0, 9.81));
  Vector world_rho(Vector3(0.0, -1.5724e-05, 0.0)); // NED system
  gtsam::Vector ECEF_omega_earth(Vector3(0.0, 0.0, 7.292115e-5));
  // world_R_ECEF is defined elsewhere in this file; it rotates the Earth
  // rotation rate from ECEF into the world frame.
  gtsam::Vector world_omega_earth(world_R_ECEF.matrix() * ECEF_omega_earth);

  SharedGaussian model(noiseModel::Isotropic::Sigma(9, 0.1));

  // Second test: zero angular motion, some acceleration - generated in matlab
  Vector measurement_acc(Vector3(6.501390843381716,  -6.763926150509185,  -2.300389940090343));
  Vector measurement_gyro(Vector3(0.1, 0.2, 0.3));

  InertialNavFactor_GlobalVelocity<Pose3, Vector3, imuBias::ConstantBias> f(PoseKey1, VelKey1, BiasKey1, PoseKey2, VelKey2, measurement_acc, measurement_gyro, measurement_dt, world_g, world_rho, world_omega_earth, model);

  Rot3 R1(0.487316618,   0.125253866,   0.86419557,
       0.580273724,  0.693095498, -0.427669306,
      -0.652537293,  0.709880342,  0.265075427);
  Point3 t1(2.0,1.0,3.0);
  Pose3 Pose1(R1, t1);
  Vector3 Vel1 = Vector(Vector3(0.5,-0.5,0.4));
  Rot3 R2(0.473618898,   0.119523052,  0.872582019,
       0.609241153,   0.67099888, -0.422594037,
      -0.636011287,  0.731761397,  0.244979388);
  // Propagate position and velocity one time step to get a consistent
  // second pose/velocity for the linearization point.
  Point3 t2 = t1.compose( Point3(Vel1*measurement_dt) );
  Pose3 Pose2(R2, t2);
  Vector dv = measurement_dt * (R1.matrix() * measurement_acc + world_g);
  Vector3 Vel2 = Vel1 + dv;
  imuBias::ConstantBias Bias1;

  Values values;
  values.insert(PoseKey1, Pose1);
  values.insert(PoseKey2, Pose2);
  values.insert(VelKey1,  Vel1);
  values.insert(VelKey2,  Vel2);
  values.insert(BiasKey1, Bias1);

  // NOTE(review): this ordering is built but never used by the timing loop
  // below -- presumably left over from an elimination benchmark.
  Ordering ordering;
  ordering.push_back(PoseKey1);
  ordering.push_back(VelKey1);
  ordering.push_back(BiasKey1);
  ordering.push_back(PoseKey2);
  ordering.push_back(VelKey2);

  GaussianFactorGraph graph;
  gttic_(LinearizeTiming);
  for(size_t i = 0; i < 100000; ++i) {
    GaussianFactor::shared_ptr g = f.linearize(values);
    graph.push_back(g);
  }
  gttoc_(LinearizeTiming);
  tictoc_finishedIteration_();
  tictoc_print_();
}
Пример #6
0
/* ************************************************************************* */
// Combines three Jacobian factors into a single HessianFactor and checks it
// against a hand-stacked full Jacobian, then verifies EliminateCholesky
// produces the same conditional and remaining factor as Jacobian elimination.
TEST(HessianFactor, CombineAndEliminate2) {
  Matrix A01 = I_3x3;
  Vector3 b0(1.5, 1.5, 1.5);
  Vector3 s0(1.6, 1.6, 1.6);

  Matrix A10 = 2.0 * I_3x3;
  Matrix A11 = -2.0 * I_3x3;
  Vector3 b1(2.5, 2.5, 2.5);
  Vector3 s1(2.6, 2.6, 2.6);

  Matrix A21 = 3.0 * I_3x3;
  Vector3 b2(3.5, 3.5, 3.5);
  Vector3 s2(3.6, 3.6, 3.6);

  GaussianFactorGraph gfg;
  gfg.add(1, A01, b0, noiseModel::Diagonal::Sigmas(s0, true));
  gfg.add(0, A10, 1, A11, b1, noiseModel::Diagonal::Sigmas(s1, true));
  gfg.add(1, A21, b2, noiseModel::Diagonal::Sigmas(s2, true));

  // Stack the three factors into one 9x3 system per variable.
  Matrix93 A0, A1;
  A0 << A10, Z_3x3, Z_3x3;
  A1 << A11, A01, A21;
  Vector9 b, sigmas;
  b << b1, b0, b2;
  sigmas << s1, s0, s2;

  // create a full, uneliminated version of the factor
  JacobianFactor jacobian(0, A0, 1, A1, b,
      noiseModel::Diagonal::Sigmas(sigmas, true));

  // Make sure combining works
  HessianFactor hessian(gfg);
  EXPECT(assert_equal(HessianFactor(jacobian), hessian, 1e-6));
  EXPECT(
      assert_equal(jacobian.augmentedInformation(),
          hessian.augmentedInformation(), 1e-9));

  // perform elimination on jacobian
  Ordering ordering = list_of(0);
  GaussianConditional::shared_ptr expectedConditional;
  JacobianFactor::shared_ptr expectedFactor;
  boost::tie(expectedConditional, expectedFactor) = //
      jacobian.eliminate(ordering);

  // Eliminate
  GaussianConditional::shared_ptr actualConditional;
  HessianFactor::shared_ptr actualHessian;
  boost::tie(actualConditional, actualHessian) = //
      EliminateCholesky(gfg, ordering);

  EXPECT(assert_equal(*expectedConditional, *actualConditional, 1e-6));
  VectorValues v;
  v.insert(1, Vector3(1, 2, 3));
  EXPECT_DOUBLES_EQUAL(expectedFactor->error(v), actualHessian->error(v), 1e-9);
  EXPECT(
      assert_equal(expectedFactor->augmentedInformation(),
          actualHessian->augmentedInformation(), 1e-9));
  EXPECT(assert_equal(HessianFactor(*expectedFactor), *actualHessian, 1e-6));
}
Пример #7
0
	/* ************************************************************************* */
	// Initialize the filter state from a mean x0 and diagonal covariance P0:
	// build the single-factor prior graph f(x0) and eliminate it into P(x0).
	KalmanFilter::State KalmanFilter::init(const Vector& x0,
			const SharedDiagonal& P0) {

		GaussianFactorGraph priorGraph;
		// |x-x0|^2_diagSigma
		priorGraph.add(0, I_, x0, P0);
		return solve(priorGraph, useQR());
	}
Пример #8
0
// Create a planar factor graph of size N, eliminate it multifrontally, and
// return the elapsed wall-clock seconds spent in elimination only (graph
// construction is excluded from the timed region).
// NOTE: 'old' is retained for interface compatibility but is unused here.
double timePlanarSmootherEliminate(int N, bool old = true) {
  (void)old;  // silence unused-parameter warning; kept so callers still compile
  GaussianFactorGraph fg = planarGraph(N).get<0>();
  clock_t start = clock();
  fg.eliminateMultifrontal();
  clock_t end = clock();
  double dif = (double)(end - start) / CLOCKS_PER_SEC;
  return dif;
}
Пример #9
0
/* ************************************************************************* */
// gradientAtZero() on a mixed Jacobian/Hessian graph must match the
// hand-computed per-variable gradient at delta = 0.
TEST(GaussianFactorGraph, gradientAtZero) {
  GaussianFactorGraph gfg = createGaussianFactorGraphWithHessianFactor();

  // Expected gradient, worked out per variable.
  VectorValues expected;
  expected.insert(0, Vector2(-25, 17.5));
  expected.insert(1, Vector2(5, -13.5));
  expected.insert(2, Vector2(29, 4));

  const VectorValues actual = gfg.gradientAtZero();
  EXPECT(assert_equal(expected, actual));
}
Пример #10
0
	/* ************************************************************************* */
	// Initialize the filter state from a mean x and full covariance matrix P0
	// by adding an information-form prior and eliminating it.
	KalmanFilter::State KalmanFilter::init(const Vector& x, const Matrix& P0) {

		GaussianFactorGraph priorGraph;
		// 0.5*(x-x0)'*inv(Sigma)*(x-x0)
		HessianFactor::shared_ptr prior(new HessianFactor(0, x, P0));
		priorGraph.push_back(prior);
		return solve(priorGraph, useQR());
	}
Пример #11
0
// Create a Kalman smoother for t=1:T, optimize it, and return the elapsed
// seconds spent in the optimize call only.
double timeKalmanSmoother(int T) {
  GaussianFactorGraph smoother = createSmoother(T);
  const clock_t tic = clock();
  // Keys will come out sorted since keys() returns a set
  smoother.optimize(Ordering(smoother.keys()));
  const clock_t toc = clock();
  return static_cast<double>(toc - tic) / CLOCKS_PER_SEC;
}
Пример #12
0
 /* ************************************************************************* */
 // Deep-copies every factor in the graph. Null factor slots are propagated
 // unchanged so factor indices in the clone line up with the original.
 GaussianFactorGraph GaussianFactorGraph::clone() const {
   GaussianFactorGraph result;
   // Range-for instead of BOOST_FOREACH, consistent with the C++11 loops
   // used elsewhere in this file.
   for (const sharedFactor& f : *this) {
     if (f)
       result.push_back(f->clone());
     else
       result.push_back(sharedFactor()); // Passes on null factors so indices remain valid
   }
   return result;
 }
/* ************************************************************************* */
// Checks joint densities computed from the balanced smoother Bayes tree:
// P(x1,x7) across the tree and P(x1,x4) involving a root variable, each
// compared against hand-derived conditionals.
TEST( GaussianBayesTree, balanced_smoother_joint )
{
  // Create smoother with 7 nodes
  Ordering ordering;
  ordering += X(1),X(3),X(5),X(7),X(2),X(6),X(4);
  GaussianFactorGraph smoother = createSmoother(7);

  // Create the Bayes tree, expected to look like:
  //   x5 x6 x4
  //     x3 x2 : x4
  //       x1 : x2
  //     x7 : x6
  GaussianBayesTree bayesTree = *smoother.eliminateMultifrontal(ordering);

  // Conditional density elements reused by both tests
  const Matrix I = eye(2), A = -0.00429185*I;

  // Check the joint density P(x1,x7) factored as P(x1|x7)P(x7)
  GaussianBayesNet expected1 = list_of
    // Why does the sign get flipped on the prior?
    (GaussianConditional(X(1), zero(2), I/sigmax7, X(7), A/sigmax7))
    (GaussianConditional(X(7), zero(2), -1*I/sigmax7));
  GaussianBayesNet actual1 = *bayesTree.jointBayesNet(X(1),X(7));
  EXPECT(assert_equal(expected1, actual1, tol));

  //  // Check the joint density P(x7,x1) factored as P(x7|x1)P(x1)
  //  GaussianBayesNet expected2;
  //  GaussianConditional::shared_ptr
  //      parent2(new GaussianConditional(X(1), zero(2), -1*I/sigmax1, ones(2)));
  //    expected2.push_front(parent2);
  //  push_front(expected2,X(7), zero(2), I/sigmax1, X(1), A/sigmax1, sigma);
  //  GaussianBayesNet actual2 = *bayesTree.jointBayesNet(X(7),X(1));
  //  EXPECT(assert_equal(expected2,actual2,tol));

  // Check the joint density P(x1,x4), i.e. with a root variable
  double sig14 = 0.784465;
  Matrix A14 = -0.0769231*I;
  GaussianBayesNet expected3 = list_of
    (GaussianConditional(X(1), zero(2), I/sig14, X(4), A14/sig14))
    (GaussianConditional(X(4), zero(2), I/sigmax4));
  GaussianBayesNet actual3 = *bayesTree.jointBayesNet(X(1),X(4));
  EXPECT(assert_equal(expected3,actual3,tol));

  //  // Check the joint density P(x4,x1), i.e. with a root variable, factored the other way
  //  GaussianBayesNet expected4;
  //  GaussianConditional::shared_ptr
  //      parent4(new GaussianConditional(X(1), zero(2), -1.0*I/sigmax1, ones(2)));
  //    expected4.push_front(parent4);
  //  double sig41 = 0.668096;
  //  Matrix A41 = -0.055794*I;
  //  push_front(expected4,X(4), zero(2), I/sig41, X(1), A41/sig41, sigma);
  //  GaussianBayesNet actual4 = *bayesTree.jointBayesNet(X(4),X(1));
  //  EXPECT(assert_equal(expected4,actual4,tol));
}
/* ************************************************************************* */
// Compares the fixed-lag smoother's estimate of `key` against a full batch
// solution obtained by linearizing and optimizing the entire graph.
bool check_smoother(const NonlinearFactorGraph& fullgraph, const Values& fullinit, const IncrementalFixedLagSmoother& smoother, const Key& key) {

  // Batch gold standard: linearize everything, solve, retract.
  GaussianFactorGraph linear = *fullgraph.linearize(fullinit);
  Values batchSolution = fullinit.retract(linear.optimize());

  Point2 expected = batchSolution.at<Point2>(key);
  Point2 actual = smoother.calculateEstimate<Point2>(key);
  return assert_equal(expected, actual);
}
Пример #15
0
/* ************************************************************************* */
// Same combine-and-eliminate check as CombineAndEliminate2, but the stacked
// full Jacobian is built with the gtsam::stack / concatVectors helpers
// instead of Eigen comma initializers.
TEST(HessianFactor, CombineAndEliminate)
{
  Matrix A01 = (Matrix(3,3) <<
      1.0, 0.0, 0.0,
      0.0, 1.0, 0.0,
      0.0, 0.0, 1.0);
  Vector b0 = (Vector(3) << 1.5, 1.5, 1.5);
  Vector s0 = (Vector(3) << 1.6, 1.6, 1.6);

  Matrix A10 = (Matrix(3,3) <<
      2.0, 0.0, 0.0,
      0.0, 2.0, 0.0,
      0.0, 0.0, 2.0);
  Matrix A11 = (Matrix(3,3) <<
      -2.0, 0.0, 0.0,
      0.0, -2.0, 0.0,
      0.0, 0.0, -2.0);
  Vector b1 = (Vector(3) << 2.5, 2.5, 2.5);
  Vector s1 = (Vector(3) << 2.6, 2.6, 2.6);

  Matrix A21 = (Matrix(3,3) <<
      3.0, 0.0, 0.0,
      0.0, 3.0, 0.0,
      0.0, 0.0, 3.0);
  Vector b2 = (Vector(3) << 3.5, 3.5, 3.5);
  Vector s2 = (Vector(3) << 3.6, 3.6, 3.6);

  GaussianFactorGraph gfg;
  gfg.add(1, A01, b0, noiseModel::Diagonal::Sigmas(s0, true));
  gfg.add(0, A10, 1, A11, b1, noiseModel::Diagonal::Sigmas(s1, true));
  gfg.add(1, A21, b2, noiseModel::Diagonal::Sigmas(s2, true));

  // Stack the three factors into one 9-row system per variable.
  Matrix zero3x3 = zeros(3,3);
  Matrix A0 = gtsam::stack(3, &A10, &zero3x3, &zero3x3);
  Matrix A1 = gtsam::stack(3, &A11, &A01, &A21);
  Vector b = gtsam::concatVectors(3, &b1, &b0, &b2);
  Vector sigmas = gtsam::concatVectors(3, &s1, &s0, &s2);

  // create a full, uneliminated version of the factor
  JacobianFactor expectedFactor(0, A0, 1, A1, b, noiseModel::Diagonal::Sigmas(sigmas, true));

  // perform elimination on jacobian
  GaussianConditional::shared_ptr expectedConditional;
  JacobianFactor::shared_ptr expectedRemainingFactor;
  boost::tie(expectedConditional, expectedRemainingFactor) = expectedFactor.eliminate(Ordering(list_of(0)));

  // Eliminate
  GaussianConditional::shared_ptr actualConditional;
  HessianFactor::shared_ptr actualCholeskyFactor;
  boost::tie(actualConditional, actualCholeskyFactor) = EliminateCholesky(gfg, Ordering(list_of(0)));

  EXPECT(assert_equal(*expectedConditional, *actualConditional, 1e-6));
  EXPECT(assert_equal(HessianFactor(*expectedRemainingFactor), *actualCholeskyFactor, 1e-6));
}
Пример #16
0
/* ************************************************************************* */
// hessianDiagonal() must match the diagonal of the dense information matrix,
// split per variable into 2-dimensional segments.
TEST(GaussianFactorGraph, hessianDiagonal) {
  GaussianFactorGraph gfg = createGaussianFactorGraphWithHessianFactor();

  // Reference: extract the diagonal from the full dense Hessian.
  const Vector diag = gfg.hessian().first.diagonal();
  VectorValues expected;
  expected.insert(0, diag.segment<2>(0));
  expected.insert(1, diag.segment<2>(2));
  expected.insert(2, diag.segment<2>(4));

  EXPECT(assert_equal(expected, gfg.hessianDiagonal()));
}
Пример #17
0
/**
 * TEST gtsam solver with an over-constrained system
 *  x + y = 1
 *  x - y = 5
 *  x + 2y = 6
 */
TEST(LPSolver, overConstrainedLinearSystem) {
  // Three hard constraints on two scalar unknowns cannot all hold at once.
  Matrix coeffX = Vector3(1, 1, 1);
  Matrix coeffY = Vector3(1, -1, 2);
  Vector rhs = Vector3(1, 5, 6);
  JacobianFactor factor(1, coeffX, 2, coeffY, rhs, noiseModel::Constrained::All(3));

  GaussianFactorGraph graph;
  graph.push_back(factor);
  VectorValues solution = graph.optimize();

  // This check confirms that gtsam linear constraint solver can't handle
  // over-constrained system: some residual error must remain.
  CHECK(factor.error(solution) != 0.0);
}
Пример #18
0
	/* ************************************************************************* */
	// Fuses a new measurement factor into the previous Kalman filter state.
	// NOTE: takes ownership of the raw newFactor pointer by wrapping it in a
	// shared_ptr below -- callers must not delete or reuse the pointer.
	KalmanFilter::State fuse(const KalmanFilter::State& p,
			GaussianFactor* newFactor, bool useQR) {

		// Create a factor graph
		GaussianFactorGraph factorGraph;

		// push back previous solution and new factor
		factorGraph.push_back(p->toFactor());
		factorGraph.push_back(GaussianFactor::shared_ptr(newFactor));

		// Eliminate graph in order x0, x1, to get Bayes net P(x0|x1)P(x1)
		return solve(factorGraph, useQR);
	}
Пример #19
0
/* ************************************************************************* */
// Cross-checks the Jacobian and Hessian views of a mixed graph:
// AtA == A'A and eta == A'b must hold even when the graph contains a
// Hessian factor.
TEST(GaussianFactorGraph, matricesMixed) {
  GaussianFactorGraph gfg = createGaussianFactorGraphWithHessianFactor();
  Matrix A;
  Vector b;
  boost::tie(A, b) = gfg.jacobian();  // incorrect !
  Matrix AtA;
  Vector eta;
  boost::tie(AtA, eta) = gfg.hessian();  // correct
  EXPECT(assert_equal(A.transpose() * A, AtA));
  // Hand-computed gradient vector (negated information vector).
  Vector expected = -(Vector(6) << -25, 17.5, 5, -13.5, 29, 4).finished();
  EXPECT(assert_equal(expected, eta));
  EXPECT(assert_equal(A.transpose() * b, eta));
}
Пример #20
0
/* ************************************************************************* */
// negate() must negate each real factor individually and pass null factor
// slots through unchanged.
TEST(GaussianFactorGraph, negate) {
  GaussianFactorGraph init_graph = createGaussianFactorGraphWithHessianFactor();
  init_graph.push_back(GaussianFactor::shared_ptr());  /// Add null factor
  GaussianFactorGraph actNegation = init_graph.negate();

  // Build the expectation factor-by-factor: the five real factors negated,
  // followed by the untouched null slot.
  GaussianFactorGraph expNegation;
  for (size_t i = 0; i < 5; ++i)
    expNegation.push_back(init_graph.at(i)->negate());
  expNegation.push_back(GaussianFactor::shared_ptr());

  EXPECT(assert_equal(expNegation, actNegation));
}
Пример #21
0
/* ************************************************************************* */
// Single-variable variant of the combine-and-eliminate check: two unary
// factors on variable 1 are combined into one HessianFactor and eliminated,
// compared against the stacked-Jacobian equivalent.
TEST(HessianFactor, CombineAndEliminate1) {
  Matrix3 A01 = 3.0 * I_3x3;
  Vector3 b0(1, 0, 0);

  Matrix3 A21 = 4.0 * I_3x3;
  Vector3 b2 = Vector3::Zero();

  GaussianFactorGraph gfg;
  gfg.add(1, A01, b0);
  gfg.add(1, A21, b2);

  // Stack both factors into one 6x3 system.
  Matrix63 A1;
  A1 << A01, A21;
  Vector6 b;
  b << b0, b2;

  // create a full, uneliminated version of the factor
  JacobianFactor jacobian(1, A1, b);

  // Make sure combining works
  HessianFactor hessian(gfg);
  VectorValues v;
  v.insert(1, Vector3(1, 0, 0));
  EXPECT_DOUBLES_EQUAL(jacobian.error(v), hessian.error(v), 1e-9);
  EXPECT(assert_equal(HessianFactor(jacobian), hessian, 1e-6));
  // Information = 3^2 + 4^2 = 25 on the diagonal.
  EXPECT(assert_equal(25.0 * I_3x3, hessian.information(), 1e-9));
  EXPECT(
      assert_equal(jacobian.augmentedInformation(),
          hessian.augmentedInformation(), 1e-9));

  // perform elimination on jacobian
  Ordering ordering = list_of(1);
  GaussianConditional::shared_ptr expectedConditional;
  JacobianFactor::shared_ptr expectedFactor;
  boost::tie(expectedConditional, expectedFactor) = //
      jacobian.eliminate(ordering);

  // Eliminate
  GaussianConditional::shared_ptr actualConditional;
  HessianFactor::shared_ptr actualHessian;
  boost::tie(actualConditional, actualHessian) = //
      EliminateCholesky(gfg, ordering);

  EXPECT(assert_equal(*expectedConditional, *actualConditional, 1e-6));
  EXPECT_DOUBLES_EQUAL(expectedFactor->error(v), actualHessian->error(v), 1e-9);
  EXPECT(
      assert_equal(expectedFactor->augmentedInformation(),
          actualHessian->augmentedInformation(), 1e-9));
  EXPECT(assert_equal(HessianFactor(*expectedFactor), *actualHessian, 1e-6));
}
Пример #22
0
/* ************************************************************************* */
// Checks A^T * e (transposeMultiply) against hand-computed per-variable
// values for the simple test graph.
TEST(GaussianFactorGraph, transposeMultiplication) {
  GaussianFactorGraph A = createSimpleGaussianFactorGraph();

  // One 2-vector error term per factor, in factor order.
  Errors e;
  e += Vector2(0.0, 0.0), Vector2(15.0, 0.0), Vector2(0.0, -5.0), Vector2(-7.5, -5.0);

  VectorValues expected;
  expected.insert(1, Vector2(-37.5, -50.0));
  expected.insert(2, Vector2(-150.0, 25.0));
  expected.insert(0, Vector2(187.5, 25.0));

  VectorValues actual = A.transposeMultiply(e);
  EXPECT(assert_equal(expected, actual));
}
/* ************************************************************************* *
 Bayes tree for smoother with "nested dissection" ordering:

   Node[x1] P(x1 | x2)
   Node[x3] P(x3 | x2 x4)
   Node[x5] P(x5 | x4 x6)
   Node[x7] P(x7 | x6)
   Node[x2] P(x2 | x4)
   Node[x6] P(x6 | x4)
   Node[x4] P(x4)

 becomes

   C1     x5 x6 x4
   C2      x3 x2 : x4
   C3        x1 : x2
   C4      x7 : x6

************************************************************************* */
TEST( GaussianBayesTree, balanced_smoother_marginals )
{
  // Create smoother with 7 nodes
  GaussianFactorGraph smoother = createSmoother(7);

  // Create the Bayes tree
  Ordering ordering;
  ordering += X(1),X(3),X(5),X(7),X(2),X(6),X(4);
  GaussianBayesTree bayesTree = *smoother.eliminateMultifrontal(ordering);

  // The optimal solution of the zero-mean smoother is all zeros.
  VectorValues actualSolution = bayesTree.optimize();
  VectorValues expectedSolution = VectorValues::Zero(actualSolution);
  EXPECT(assert_equal(expectedSolution,actualSolution,tol));

  LONGS_EQUAL(4, (long)bayesTree.size());

  // NOTE(review): this local shadows the file-level `tol` used in the
  // solution check above; the marginal checks below use the looser 1e-5.
  double tol=1e-5;

  // Check marginal on x1
  JacobianFactor expected1 = GaussianDensity::FromMeanAndStddev(X(1), zero(2), sigmax1);
  JacobianFactor actual1 = *bayesTree.marginalFactor(X(1));
  Matrix expectedCovarianceX1 = eye(2,2) * (sigmax1 * sigmax1);
  Matrix actualCovarianceX1;
  GaussianFactor::shared_ptr m = bayesTree.marginalFactor(X(1), EliminateCholesky);
  actualCovarianceX1 = bayesTree.marginalFactor(X(1), EliminateCholesky)->information().inverse();
  EXPECT(assert_equal(expectedCovarianceX1, actualCovarianceX1, tol));
  EXPECT(assert_equal(expected1,actual1,tol));

  // Check marginal on x2
  double sigx2 = 0.68712938; // FIXME: this should be corrected analytically
  JacobianFactor expected2 = GaussianDensity::FromMeanAndStddev(X(2), zero(2), sigx2);
  JacobianFactor actual2 = *bayesTree.marginalFactor(X(2));
  EXPECT(assert_equal(expected2,actual2,tol));

  // Check marginal on x3
  JacobianFactor expected3 = GaussianDensity::FromMeanAndStddev(X(3), zero(2), sigmax3);
  JacobianFactor actual3 = *bayesTree.marginalFactor(X(3));
  EXPECT(assert_equal(expected3,actual3,tol));

  // Check marginal on x4
  JacobianFactor expected4 = GaussianDensity::FromMeanAndStddev(X(4), zero(2), sigmax4);
  JacobianFactor actual4 = *bayesTree.marginalFactor(X(4));
  EXPECT(assert_equal(expected4,actual4,tol));

  // Check marginal on x7 (should be equal to x1)
  JacobianFactor expected7 = GaussianDensity::FromMeanAndStddev(X(7), zero(2), sigmax7);
  JacobianFactor actual7 = *bayesTree.marginalFactor(X(7));
  EXPECT(assert_equal(expected7,actual7,tol));
}
Пример #24
0
/* ************************************************************************* */
// Exhaustively checks every dense-matrix view of a small two-factor graph:
// augmented Jacobian/Hessian, plain jacobian()/hessian(), hessianDiagonal(),
// and hessianBlockDiagonal().
TEST(GaussianFactorGraph, matrices) {
  // Create factor graph:
  // x1 x2 x3 x4 x5  b
  //  1  2  3  0  0  4
  //  5  6  7  0  0  8
  //  9 10  0 11 12 13
  //  0  0  0 14 15 16

  Matrix A00 = (Matrix(2, 3) << 1, 2, 3, 5, 6, 7).finished();
  Matrix A10 = (Matrix(2, 3) << 9, 10, 0, 0, 0, 0).finished();
  Matrix A11 = (Matrix(2, 2) << 11, 12, 14, 15).finished();

  GaussianFactorGraph gfg;
  SharedDiagonal model = noiseModel::Unit::Create(2);
  gfg.add(0, A00, Vector2(4., 8.), model);
  gfg.add(0, A10, 1, A11, Vector2(13., 16.), model);

  Matrix Ab(4, 6);
  Ab << 1, 2, 3, 0, 0, 4, 5, 6, 7, 0, 0, 8, 9, 10, 0, 11, 12, 13, 0, 0, 0, 14, 15, 16;

  // augmented versions
  EXPECT(assert_equal(Ab, gfg.augmentedJacobian()));
  EXPECT(assert_equal(Ab.transpose() * Ab, gfg.augmentedHessian()));

  // jacobian
  Matrix A = Ab.leftCols(Ab.cols() - 1);
  Vector b = Ab.col(Ab.cols() - 1);
  Matrix actualA;
  Vector actualb;
  boost::tie(actualA, actualb) = gfg.jacobian();
  EXPECT(assert_equal(A, actualA));
  EXPECT(assert_equal(b, actualb));

  // hessian
  Matrix L = A.transpose() * A;
  Vector eta = A.transpose() * b;
  Matrix actualL;
  Vector actualeta;
  boost::tie(actualL, actualeta) = gfg.hessian();
  EXPECT(assert_equal(L, actualL));
  EXPECT(assert_equal(eta, actualeta));

  // hessianDiagonal
  VectorValues expectLdiagonal;  // Make explicit that diagonal is sum-squares of columns
  expectLdiagonal.insert(0, Vector3(1 + 25 + 81, 4 + 36 + 100, 9 + 49));
  expectLdiagonal.insert(1, Vector2(121 + 196, 144 + 225));
  EXPECT(assert_equal(expectLdiagonal, gfg.hessianDiagonal()));

  // hessianBlockDiagonal
  map<Key, Matrix> actualBD = gfg.hessianBlockDiagonal();
  LONGS_EQUAL(2, actualBD.size());
  EXPECT(assert_equal(A00.transpose() * A00 + A10.transpose() * A10, actualBD[0]));
  EXPECT(assert_equal(A11.transpose() * A11, actualBD[1]));
}
/* ************************************************************************* */
// Checks the planarGraph(3) helper: expected factor/variable counts, zero
// error at the ground truth, and that the ground truth is the optimum.
TEST( SubgraphPreconditioner, planarGraph )
  {
  // Check planar graph construction
  GaussianFactorGraph A;
  VectorValues xtrue;
  boost::tie(A, xtrue) = planarGraph(3);
  LONGS_EQUAL(13,A.size());
  LONGS_EQUAL(9,xtrue.size());
  DOUBLES_EQUAL(0,error(A,xtrue),1e-9); // check zero error for xtrue

  // Check that xtrue is optimal
  GaussianBayesNet::shared_ptr R1 = GaussianSequentialSolver(A).eliminate();
  VectorValues actual = optimize(*R1);
  CHECK(assert_equal(xtrue,actual));
}
Пример #26
0
/* ************************************************************************* */
// Round-trips a GaussianBayesTree (built from a 4-variable chain) through
// string serialization and checks the deserialized tree equals the original.
TEST (Serialization, gaussian_bayes_tree) {
  const Key x1=1, x2=2, x3=3, x4=4;
  const Ordering chainOrdering = Ordering(list_of(x2)(x1)(x3)(x4));
  const SharedDiagonal chainNoise = noiseModel::Isotropic::Sigma(1, 0.5);
  const GaussianFactorGraph chain = list_of
    (JacobianFactor(x2, (Matrix(1, 1) << 1.), x1, (Matrix(1, 1) << 1.), (Vector(1) << 1.),  chainNoise))
    (JacobianFactor(x2, (Matrix(1, 1) << 1.), x3, (Matrix(1, 1) << 1.), (Vector(1) << 1.),  chainNoise))
    (JacobianFactor(x3, (Matrix(1, 1) << 1.), x4, (Matrix(1, 1) << 1.), (Vector(1) << 1.),  chainNoise))
    (JacobianFactor(x4, (Matrix(1, 1) << 1.), (Vector(1) << 1.),  chainNoise));

  // `init` is serialized; `expected` is an independent elimination so the
  // comparison does not depend on any state serialize() might touch.
  GaussianBayesTree init = *chain.eliminateMultifrontal(chainOrdering);
  GaussianBayesTree expected = *chain.eliminateMultifrontal(chainOrdering);
  GaussianBayesTree actual;

  std::string serialized = serialize(init);
  deserialize(serialized, actual);
  EXPECT(assert_equal(expected, actual));
}
Пример #27
0
/* ************************************************************************* */
// Checks multiplyHessianAdd on the simple (all-Jacobian) graph, including
// the accumulate-on-repeat semantics of the "Add" variant.
TEST(GaussianFactorGraph, multiplyHessianAdd) {
  GaussianFactorGraph gfg = createSimpleGaussianFactorGraph();

  VectorValues x = map_list_of<Key, Vector>(0, Vector2(1, 2))(1, Vector2(3, 4))(2, Vector2(5, 6));

  // Hand-computed A'A*x per variable.
  VectorValues expected;
  expected.insert(0, Vector2(-450, -450));
  expected.insert(1, Vector2(0, 0));
  expected.insert(2, Vector2(950, 1050));

  VectorValues actual;
  gfg.multiplyHessianAdd(1.0, x, actual);
  EXPECT(assert_equal(expected, actual));

  // now, do it with non-zero y: output accumulates, so the same call again
  // must double the result
  gfg.multiplyHessianAdd(1.0, x, actual);
  EXPECT(assert_equal(2 * expected, actual));
}
Пример #28
0
/* ************************************************************************* */
// Partial sequential elimination with an empty ordering must produce an
// empty Bayes net and leave the graph -- empty factor included -- untouched.
TEST(GaussianFactorGraph, eliminate_empty) {
  GaussianFactorGraph gfg;
  gfg.add(JacobianFactor());

  GaussianBayesNet::shared_ptr actualBN;
  GaussianFactorGraph::shared_ptr remainingGFG;
  boost::tie(actualBN, remainingGFG) = gfg.eliminatePartialSequential(Ordering());

  // Nothing was eliminated, so the Bayes net is empty and the remaining
  // graph is an exact copy of the original.
  const GaussianBayesNet expectedBN;
  const GaussianFactorGraph expectedLF = gfg;

  EXPECT(assert_equal(*actualBN, expectedBN));
  EXPECT(assert_equal(*remainingGFG, expectedLF));
}
Пример #29
0
/* ************************************************************************* */
// Verifies jacobian() and hessian() agree on the simple graph
// (AtA == A'A, eta == A'b) and match a hand-computed information matrix.
TEST(GaussianFactorGraph, matrices2) {
  GaussianFactorGraph gfg = createSimpleGaussianFactorGraph();
  Matrix A;
  Vector b;
  boost::tie(A, b) = gfg.jacobian();
  Matrix AtA;
  Vector eta;
  boost::tie(AtA, eta) = gfg.hessian();
  EXPECT(assert_equal(A.transpose() * A, AtA));
  EXPECT(assert_equal(A.transpose() * b, eta));
  // Hand-computed 6x6 information matrix for the three 2-dim variables.
  Matrix expectedAtA(6, 6);
  expectedAtA << 125, 0, -25, 0, -100, 0,  //
      0, 125, 0, -25, 0, -100,             //
      -25, 0, 50, 0, -25, 0,               //
      0, -25, 0, 50, 0, -25,               //
      -100, 0, -25, 0, 225, 0,             //
      0, -100, 0, -25, 0, 225;
  EXPECT(assert_equal(expectedAtA, AtA));
}
Пример #30
0
/* ************************************************************************* */
// Checks gradient(): at delta=0 it must match gradientAtZero() and the
// hand-derived gradient; at the optimum it must vanish.
TEST(GaussianFactorGraph, gradient) {
  GaussianFactorGraph fg = createSimpleGaussianFactorGraph();

  // Construct expected gradient
  // 2*f(x) = 100*(x1+c[X(1)])^2 + 100*(x2-x1-[0.2;-0.1])^2 + 25*(l1-x1-[0.0;0.2])^2 +
  // 25*(l1-x2-[-0.2;0.3])^2
  // worked out: df/dx1 = 100*[0.1;0.1] + 100*[0.2;-0.1]) + 25*[0.0;0.2] = [10+20;10-10+5] = [30;5]
  VectorValues expected = map_list_of<Key, Vector>(1, Vector2(5.0, -12.5))(2, Vector2(30.0, 5.0))(
      0, Vector2(-25.0, 17.5));

  // Check the gradient at delta=0
  VectorValues zero = VectorValues::Zero(expected);
  VectorValues actual = fg.gradient(zero);
  EXPECT(assert_equal(expected, actual));
  EXPECT(assert_equal(expected, fg.gradientAtZero()));

  // Check the gradient at the solution (should be zero)
  VectorValues solution = fg.optimize();
  VectorValues actual2 = fg.gradient(solution);
  EXPECT(assert_equal(VectorValues::Zero(solution), actual2));
}