Example #1
/* ************************************************************************* */
TEST( ISAM, iSAM_smoother )
{
  Ordering ordering;
  for (int t = 1; t <= 7; t++) ordering += X(t);

  // Create smoother with 7 nodes
  GaussianFactorGraph smoother = createSmoother(7);

  // Run iSAM by adding the factors one at a time
  GaussianISAM actual;
  for(boost::shared_ptr<GaussianFactor> factor: smoother) {
    GaussianFactorGraph factorGraph;
    factorGraph.push_back(factor);
    actual.update(factorGraph);
  }

  // Create expected Bayes Tree by solving smoother with "natural" ordering
  GaussianBayesTree expected = *smoother.eliminateMultifrontal(ordering);

  // Verify that the conditionals in the Bayes tree carry no noise model (sigmas)
  for(const GaussianBayesTree::sharedClique& clique: expected.nodes() | br::map_values) {
    GaussianConditional::shared_ptr conditional = clique->conditional();
    EXPECT(!conditional->get_model());
  }

  // Check whether BayesTree is correct
  EXPECT(assert_equal(GaussianFactorGraph(expected).augmentedHessian(), GaussianFactorGraph(actual).augmentedHessian()));

  // obtain solution
  VectorValues e; // expected solution
  for (int t = 1; t <= 7; t++) e.insert(X(t), Vector::Zero(2));
  VectorValues optimized = actual.optimize(); // actual solution
  EXPECT(assert_equal(e, optimized));
}
Example #2
/* ************************************************************************* */
TEST(GaussianFactorGraph, multiplyHessianAdd2) {
  GaussianFactorGraph gfg = createGaussianFactorGraphWithHessianFactor();

  // brute force
  Matrix AtA;
  Vector eta;
  boost::tie(AtA, eta) = gfg.hessian();
  Vector X(6);
  X << 1, 2, 3, 4, 5, 6;
  Vector Y(6);
  Y << -450, -450, 300, 400, 2950, 3450;
  EXPECT(assert_equal(Y, AtA * X));

  VectorValues x = map_list_of<Key, Vector>(0, Vector2(1, 2))(1, Vector2(3, 4))(2, Vector2(5, 6));

  VectorValues expected;
  expected.insert(0, Vector2(-450, -450));
  expected.insert(1, Vector2(300, 400));
  expected.insert(2, Vector2(2950, 3450));

  VectorValues actual;
  gfg.multiplyHessianAdd(1.0, x, actual);
  EXPECT(assert_equal(expected, actual));

  // Call again with the same x: actual is not zeroed, so the result doubles
  gfg.multiplyHessianAdd(1.0, x, actual);
  EXPECT(assert_equal(2 * expected, actual));
}
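A note on the semantics, inferred from the test itself rather than from stated API documentation: multiplyHessianAdd(alpha, x, y) accumulates into y instead of overwriting it,

\[ y \leftarrow y + \alpha A^\top A x, \]

which is why the second call with the same x yields exactly 2 * expected.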
Example #3
VectorValues KeyInfo::x0() const {
  VectorValues result;
  BOOST_FOREACH ( const KeyInfo::value_type &item, *this ) {
    result.insert(item.first, Vector::Zero(item.second.dim()));
  }
  return result;
}
Example #4
  /* ************************************************************************* */
  VectorValues GaussianFactorGraph::optimizeGradientSearch() const
  {
    gttic(GaussianFactorGraph_optimizeGradientSearch);

    gttic(Compute_Gradient);
    // Compute gradient (call gradientAtZero function, which is defined for various linear systems)
    VectorValues grad = gradientAtZero();
    double gradientSqNorm = grad.dot(grad);
    gttoc(Compute_Gradient);

    gttic(Compute_Rg);
    // Compute R * g
    Errors Rg = *this * grad;
    gttoc(Compute_Rg);

    gttic(Compute_minimizing_step_size);
    // Compute minimizing step size
    double step = -gradientSqNorm / dot(Rg, Rg);
    gttoc(Compute_minimizing_step_size);

    gttic(Compute_point);
    // Compute steepest descent point
    grad *= step;
    gttoc(Compute_point);

    return grad;
  }
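The step size above is the exact line-search minimizer along the gradient direction. A short derivation, assuming the standard least-squares objective E(x) = 0.5 ||Ax - b||^2 with g = gradientAtZero():

\[ \frac{d}{d\alpha} E(\alpha g) = \alpha \lVert Ag \rVert^2 + g^\top g = 0 \;\Rightarrow\; \alpha^\ast = -\frac{g^\top g}{\lVert Ag \rVert^2}, \]

so the returned point is alpha* g, with ||Ag||^2 computed as dot(Rg, Rg) above.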
Example #5
  /* ************************************************************************* */
  VectorValues GaussianConditional::solve(const VectorValues& x) const
  {
    // Concatenate all vector values that correspond to parent variables
    const Vector xS = x.vector(FastVector<Key>(beginParents(), endParents()));

    // Update right-hand-side
    const Vector rhs = get_d() - get_S() * xS;

    // Solve the triangular system R x = rhs
    const Vector solution = get_R().triangularView<Eigen::Upper>().solve(rhs);

    // Check for an indeterminate solution
    if (solution.hasNaN()) {
      throw IndeterminantLinearSystemException(keys().front());
    }

    // Insert solution into a VectorValues
    VectorValues result;
    DenseIndex vectorPosition = 0;
    for (const_iterator frontal = beginFrontals(); frontal != endFrontals(); ++frontal) {
      result.insert(*frontal, solution.segment(vectorPosition, getDim(frontal)));
      vectorPosition += getDim(frontal);
    }

    return result;
  }
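For reference, the back-substitution this method implements, with R the upper-triangular frontal block, S the parent blocks, and d the right-hand side stored in the conditional:

\[ R\,x_F = d - S\,x_S \;\Rightarrow\; x_F = R^{-1}(d - S\,x_S), \]

where x_S stacks the parent values and x_F the frontal variables being solved for.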
Example #6
/* ************************************************************************* */
TEST(VectorValues, resizeLike) {
  // insert, with out-of-order indices
  VectorValues original;
  original.insert(0, Vector_(1, 1.0));
  original.insert(1, Vector_(2, 2.0, 3.0));
  original.insert(5, Vector_(2, 6.0, 7.0));
  original.insert(2, Vector_(2, 4.0, 5.0));

  VectorValues actual(10, 3);
  actual.resizeLike(original);

  // Check dimensions
  LONGS_EQUAL(6, actual.size());
  LONGS_EQUAL(7, actual.dim());
  LONGS_EQUAL(1, actual.dim(0));
  LONGS_EQUAL(2, actual.dim(1));
  LONGS_EQUAL(2, actual.dim(2));
  LONGS_EQUAL(2, actual.dim(5));

  // Logic
  EXPECT(actual.exists(0));
  EXPECT(actual.exists(1));
  EXPECT(actual.exists(2));
  EXPECT(!actual.exists(3));
  EXPECT(!actual.exists(4));
  EXPECT(actual.exists(5));
  EXPECT(!actual.exists(6));

  // Check exceptions
  CHECK_EXCEPTION(actual.insert(1, Vector()), invalid_argument);
}
Example #7
  /* ************************************************************************* */
  VectorValues GaussianConditional::solveOtherRHS(
    const VectorValues& parents, const VectorValues& rhs) const
  {
    // Concatenate all vector values that correspond to parent variables
    Vector xS = parents.vector(FastVector<Key>(beginParents(), endParents()));

    // Instead of updating getb(), update the right-hand-side from the given rhs
    const Vector rhsR = rhs.vector(FastVector<Key>(beginFrontals(), endFrontals()));
    xS = rhsR - get_S() * xS;

    // Solve the triangular system
    Vector soln = get_R().triangularView<Eigen::Upper>().solve(xS);

    // Scale by sigmas
    if(model_)
      soln.array() *= model_->sigmas().array();

    // Insert solution into a VectorValues
    VectorValues result;
    DenseIndex vectorPosition = 0;
    for(const_iterator frontal = beginFrontals(); frontal != endFrontals(); ++frontal) {
      result.insert(*frontal, soln.segment(vectorPosition, getDim(frontal)));
      vectorPosition += getDim(frontal);
    }

    return result;
  }
Example #8
/* ************************************************************************* */
TEST(HessianFactor, CombineAndEliminate2) {
  Matrix A01 = I_3x3;
  Vector3 b0(1.5, 1.5, 1.5);
  Vector3 s0(1.6, 1.6, 1.6);

  Matrix A10 = 2.0 * I_3x3;
  Matrix A11 = -2.0 * I_3x3;
  Vector3 b1(2.5, 2.5, 2.5);
  Vector3 s1(2.6, 2.6, 2.6);

  Matrix A21 = 3.0 * I_3x3;
  Vector3 b2(3.5, 3.5, 3.5);
  Vector3 s2(3.6, 3.6, 3.6);

  GaussianFactorGraph gfg;
  gfg.add(1, A01, b0, noiseModel::Diagonal::Sigmas(s0, true));
  gfg.add(0, A10, 1, A11, b1, noiseModel::Diagonal::Sigmas(s1, true));
  gfg.add(1, A21, b2, noiseModel::Diagonal::Sigmas(s2, true));

  Matrix93 A0, A1;
  A0 << A10, Z_3x3, Z_3x3;
  A1 << A11, A01, A21;
  Vector9 b, sigmas;
  b << b1, b0, b2;
  sigmas << s1, s0, s2;

  // create a full, uneliminated version of the factor
  JacobianFactor jacobian(0, A0, 1, A1, b,
      noiseModel::Diagonal::Sigmas(sigmas, true));

  // Make sure combining works
  HessianFactor hessian(gfg);
  EXPECT(assert_equal(HessianFactor(jacobian), hessian, 1e-6));
  EXPECT(
      assert_equal(jacobian.augmentedInformation(),
          hessian.augmentedInformation(), 1e-9));

  // perform elimination on jacobian
  Ordering ordering = list_of(0);
  GaussianConditional::shared_ptr expectedConditional;
  JacobianFactor::shared_ptr expectedFactor;
  boost::tie(expectedConditional, expectedFactor) = //
      jacobian.eliminate(ordering);

  // Eliminate
  GaussianConditional::shared_ptr actualConditional;
  HessianFactor::shared_ptr actualHessian;
  boost::tie(actualConditional, actualHessian) = //
      EliminateCholesky(gfg, ordering);

  EXPECT(assert_equal(*expectedConditional, *actualConditional, 1e-6));
  VectorValues v;
  v.insert(1, Vector3(1, 2, 3));
  EXPECT_DOUBLES_EQUAL(expectedFactor->error(v), actualHessian->error(v), 1e-9);
  EXPECT(
      assert_equal(expectedFactor->augmentedInformation(),
          actualHessian->augmentedInformation(), 1e-9));
  EXPECT(assert_equal(HessianFactor(*expectedFactor), *actualHessian, 1e-6));
}
Example #9
/* ************************************************************************* */
TEST(LPSolver, LinearCost) {
  LinearCost cost(1, Vector3(2., 4., 6.));
  VectorValues x;
  x.insert(1, Vector3(1., 3., 5.));
  double error = cost.error(x);
  double expectedError = 44.0;
  DOUBLES_EQUAL(expectedError, error, 1e-100);
}
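The expected value is just the dot product of the cost coefficients with x (hand arithmetic, spelled out for clarity):

\[ c^\top x = 2 \cdot 1 + 4 \cdot 3 + 6 \cdot 5 = 44. \]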
Example #10
/* ************************************************************************* */
double HessianFactor::error(const VectorValues& c) const {
  // error 0.5*(f - 2*x'*g + x'*G*x)
  const double f = constantTerm();
  const double xtg = c.vector().dot(linearTerm());
  const double xGx = c.vector().transpose() *
      info_.range(0, this->size(), 0, this->size()).selfadjointView<Eigen::Upper>() *
      c.vector();

  return 0.5 * (f - 2.0 * xtg + xGx);
}
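Written out, the quadratic form this evaluates, together with its gradient (which explains the -g expected in the HessianFactor gradientAtZero test further below):

\[ E(x) = \tfrac{1}{2}\left(f - 2x^\top g + x^\top G x\right), \qquad \nabla E(x) = Gx - g, \qquad \nabla E(0) = -g. \]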
Example #11
 /* ************************************************************************* */
 VectorValues GaussianFactorGraph::gradientAtZero() const {
   // Zero-out the gradient
   VectorValues g;
   BOOST_FOREACH(const sharedFactor& factor, *this) {
     VectorValues gi = factor->gradientAtZero();
     g.addInPlace_(gi);
   }
   return g;
 }
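Since the graph error is a sum of factor errors, its gradient at zero is the sum of the per-factor gradients, which is exactly what the loop accumulates:

\[ \nabla E(0) = \sum_i \nabla E_i(0). \]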
Example #12
/* ************************************************************************* */
TEST(GaussianFactorGraph, gradientAtZero) {
  GaussianFactorGraph gfg = createGaussianFactorGraphWithHessianFactor();
  VectorValues expected;
  VectorValues actual = gfg.gradientAtZero();
  expected.insert(0, Vector2(-25, 17.5));
  expected.insert(1, Vector2(5, -13.5));
  expected.insert(2, Vector2(29, 4));
  EXPECT(assert_equal(expected, actual));
}
Example #13
 /* ************************************************************************* */
 VectorValues GaussianFactorGraph::hessianDiagonal() const {
   VectorValues d;
   BOOST_FOREACH(const sharedFactor& factor, *this) {
     if(factor){
       VectorValues di = factor->hessianDiagonal();
       d.addInPlace_(di);
     }
   }
   return d;
 }
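Same accumulation pattern: for stacked Jacobians the diagonal of the full Hessian is the element-wise sum of the per-factor Hessian diagonals,

\[ \operatorname{diag}(A^\top A) = \sum_i \operatorname{diag}(A_i^\top A_i), \]

i.e. per factor, the sums of squares of its Jacobian columns (see the explicit numbers in the matrices test below).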
Example #14
/* ************************************************************************* */
TEST(GaussianFactorGraph, matrices) {
  // Create factor graph:
  // x1 x2 x3 x4 x5  b
  //  1  2  3  0  0  4
  //  5  6  7  0  0  8
  //  9 10  0 11 12 13
  //  0  0  0 14 15 16

  Matrix A00 = (Matrix(2, 3) << 1, 2, 3, 5, 6, 7).finished();
  Matrix A10 = (Matrix(2, 3) << 9, 10, 0, 0, 0, 0).finished();
  Matrix A11 = (Matrix(2, 2) << 11, 12, 14, 15).finished();

  GaussianFactorGraph gfg;
  SharedDiagonal model = noiseModel::Unit::Create(2);
  gfg.add(0, A00, Vector2(4., 8.), model);
  gfg.add(0, A10, 1, A11, Vector2(13., 16.), model);

  Matrix Ab(4, 6);
  Ab << 1, 2, 3, 0, 0, 4,
        5, 6, 7, 0, 0, 8,
        9, 10, 0, 11, 12, 13,
        0, 0, 0, 14, 15, 16;

  // augmented versions
  EXPECT(assert_equal(Ab, gfg.augmentedJacobian()));
  EXPECT(assert_equal(Ab.transpose() * Ab, gfg.augmentedHessian()));

  // jacobian
  Matrix A = Ab.leftCols(Ab.cols() - 1);
  Vector b = Ab.col(Ab.cols() - 1);
  Matrix actualA;
  Vector actualb;
  boost::tie(actualA, actualb) = gfg.jacobian();
  EXPECT(assert_equal(A, actualA));
  EXPECT(assert_equal(b, actualb));

  // hessian
  Matrix L = A.transpose() * A;
  Vector eta = A.transpose() * b;
  Matrix actualL;
  Vector actualeta;
  boost::tie(actualL, actualeta) = gfg.hessian();
  EXPECT(assert_equal(L, actualL));
  EXPECT(assert_equal(eta, actualeta));

  // hessianDiagonal
  VectorValues expectLdiagonal;  // Make explicit that the diagonal is the sum of squares of each column
  expectLdiagonal.insert(0, Vector3(1 + 25 + 81, 4 + 36 + 100, 9 + 49));
  expectLdiagonal.insert(1, Vector2(121 + 196, 144 + 225));
  EXPECT(assert_equal(expectLdiagonal, gfg.hessianDiagonal()));

  // hessianBlockDiagonal
  map<Key, Matrix> actualBD = gfg.hessianBlockDiagonal();
  LONGS_EQUAL(2, actualBD.size());
  EXPECT(assert_equal(A00.transpose() * A00 + A10.transpose() * A10, actualBD[0]));
  EXPECT(assert_equal(A11.transpose() * A11, actualBD[1]));
}
Example #15
/* ************************************************************************* */
TEST(GaussianFactorGraph, hessianDiagonal) {
  GaussianFactorGraph gfg = createGaussianFactorGraphWithHessianFactor();
  VectorValues expected;
  Matrix infoMatrix = gfg.hessian().first;
  Vector d = infoMatrix.diagonal();

  VectorValues actual = gfg.hessianDiagonal();
  expected.insert(0, d.segment<2>(0));
  expected.insert(1, d.segment<2>(2));
  expected.insert(2, d.segment<2>(4));
  EXPECT(assert_equal(expected, actual));
}
Example #16
/* ************************************************************************* */
TEST(GaussianBayesNet, ComputeSteepestDescentPoint) {

  // Create an arbitrary Bayes Net
  GaussianBayesNet gbn;
  gbn += GaussianConditional::shared_ptr(new GaussianConditional(
    0, Vector2(1.0,2.0), (Matrix(2, 2) << 3.0,4.0,0.0,6.0).finished(),
    3, (Matrix(2, 2) << 7.0,8.0,9.0,10.0).finished(),
    4, (Matrix(2, 2) << 11.0,12.0,13.0,14.0).finished()));
  gbn += GaussianConditional::shared_ptr(new GaussianConditional(
    1, Vector2(15.0,16.0), (Matrix(2, 2) << 17.0,18.0,0.0,20.0).finished(),
    2, (Matrix(2, 2) << 21.0,22.0,23.0,24.0).finished(),
    4, (Matrix(2, 2) << 25.0,26.0,27.0,28.0).finished()));
  gbn += GaussianConditional::shared_ptr(new GaussianConditional(
    2, Vector2(29.0,30.0), (Matrix(2, 2) << 31.0,32.0,0.0,34.0).finished(),
    3, (Matrix(2, 2) << 35.0,36.0,37.0,38.0).finished()));
  gbn += GaussianConditional::shared_ptr(new GaussianConditional(
    3, Vector2(39.0,40.0), (Matrix(2, 2) << 41.0,42.0,0.0,44.0).finished(),
    4, (Matrix(2, 2) << 45.0,46.0,47.0,48.0).finished()));
  gbn += GaussianConditional::shared_ptr(new GaussianConditional(
    4, Vector2(49.0,50.0), (Matrix(2, 2) << 51.0,52.0,0.0,54.0).finished()));

  // Compute the Hessian numerically
  Matrix hessian = numericalHessian<Vector10>(
      boost::bind(&computeError, gbn, _1), Vector10::Zero());

  // Compute the gradient numerically
  Vector gradient = numericalGradient<Vector10>(
      boost::bind(&computeError, gbn, _1), Vector10::Zero());

  // Compute the gradient using dense matrices
  Matrix augmentedHessian = GaussianFactorGraph(gbn).augmentedHessian();
  LONGS_EQUAL(11, (long)augmentedHessian.cols());
  Vector denseMatrixGradient = -augmentedHessian.col(10).segment(0,10);
  EXPECT(assert_equal(gradient, denseMatrixGradient, 1e-5));

  // Compute the steepest descent point
  double step = -gradient.squaredNorm() / (gradient.transpose() * hessian * gradient)(0);
  Vector expected = gradient * step;

  // Compute the steepest descent point with the dogleg function
  VectorValues actual = gbn.optimizeGradientSearch();

  // Check that points agree
  FastVector<Key> keys = list_of(0)(1)(2)(3)(4);
  Vector actualAsVector = actual.vector(keys);
  EXPECT(assert_equal(expected, actualAsVector, 1e-5));

  // Check that point causes a decrease in error
  double origError = GaussianFactorGraph(gbn).error(VectorValues::Zero(actual));
  double newError = GaussianFactorGraph(gbn).error(actual);
  EXPECT(newError < origError);
}
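This verifies numerically the same closed-form step derived after Example #4, alpha* = -g'g / (g' H g), and then checks that the resulting point actually lowers the error.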
Example #17
/* ************************************************************************* */
TEST( testLinearContainerFactor, jacobian_factor_withlinpoints ) {

  Matrix A1 = (Matrix(2, 2) <<
      2.74222, -0.0067457,
      0.0,  2.63624).finished();
  Matrix A2 = (Matrix(2, 2) <<
      -0.0455167, -0.0443573,
      -0.0222154, -0.102489).finished();
  Vector b = (Vector(2) << 0.0277052,
      -0.0533393).finished();

  JacobianFactor expLinFactor(l1, A1, l2, A2, b, diag_model2);

  Values values;
  values.insert(l1, landmark1);
  values.insert(l2, landmark2);
  values.insert(x1, poseA1);
  values.insert(x2, poseA2);

  LinearContainerFactor actFactor(expLinFactor, values);
  LinearContainerFactor actFactorNolin(expLinFactor);

  EXPECT(assert_equal(actFactor, actFactor, tol));
  EXPECT(assert_inequal(actFactor, actFactorNolin, tol));
  EXPECT(assert_inequal(actFactorNolin, actFactor, tol));

  // Check contents
  Values expLinPoint;
  expLinPoint.insert(l1, landmark1);
  expLinPoint.insert(l2, landmark2);
  CHECK(actFactor.linearizationPoint());
  EXPECT(actFactor.hasLinearizationPoint());
  EXPECT(assert_equal(expLinPoint, *actFactor.linearizationPoint()));

  // Check error evaluation
  Vector delta_l1 = (Vector(2) << 1.0, 2.0).finished();
  Vector delta_l2 = (Vector(2) << 3.0, 4.0).finished();

  VectorValues delta = values.zeroVectors();
  delta.at(l1) = delta_l1;
  delta.at(l2) = delta_l2;
  Values noisyValues = values.retract(delta);
  double expError = expLinFactor.error(delta);
  EXPECT_DOUBLES_EQUAL(expError, actFactor.error(noisyValues), tol);
  EXPECT_DOUBLES_EQUAL(expLinFactor.error(values.zeroVectors()), actFactor.error(values), tol);

  // Check linearization with corrections for updated linearization point
  GaussianFactor::shared_ptr actLinearizationB = actFactor.linearize(noisyValues);
  Vector bprime = b - A1 * delta_l1 - A2 * delta_l2;
  JacobianFactor expLinFactor2(l1, A1, l2, A2, bprime, diag_model2);
  EXPECT(assert_equal(*expLinFactor2.clone(), *actLinearizationB, tol));
}
Example #18
 /* ************************************************************************* */
 VectorValues Values::localCoordinates(const Values& cp) const {
   if(this->size() != cp.size())
     throw DynamicValuesMismatched();
   VectorValues result;
   for(const_iterator it1=this->begin(), it2=cp.begin(); it1!=this->end(); ++it1, ++it2) {
     if(it1->key != it2->key)
       throw DynamicValuesMismatched(); // If keys do not match
     // Will throw a dynamic_cast exception if types do not match
     // NOTE: this is separate from localCoordinates(cp, ordering, result) due to at() vs. insert
     result.insert(it1->key, it1->value.localCoordinates_(it2->value));
   }
   return result;
 }
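localCoordinates is the inverse of retract (Example #25 below): for each key it returns the tangent-space vector delta such that

\[ \mathrm{retract}(x, \delta) = x', \]

which for plain vector-valued variables reduces to delta = x' - x.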
Example #19
/* ************************************************************************* */
TEST(HessianFactor, CombineAndEliminate1) {
  Matrix3 A01 = 3.0 * I_3x3;
  Vector3 b0(1, 0, 0);

  Matrix3 A21 = 4.0 * I_3x3;
  Vector3 b2 = Vector3::Zero();

  GaussianFactorGraph gfg;
  gfg.add(1, A01, b0);
  gfg.add(1, A21, b2);

  Matrix63 A1;
  A1 << A01, A21;
  Vector6 b;
  b << b0, b2;

  // create a full, uneliminated version of the factor
  JacobianFactor jacobian(1, A1, b);

  // Make sure combining works
  HessianFactor hessian(gfg);
  VectorValues v;
  v.insert(1, Vector3(1, 0, 0));
  EXPECT_DOUBLES_EQUAL(jacobian.error(v), hessian.error(v), 1e-9);
  EXPECT(assert_equal(HessianFactor(jacobian), hessian, 1e-6));
  EXPECT(assert_equal(25.0 * I_3x3, hessian.information(), 1e-9));
  EXPECT(
      assert_equal(jacobian.augmentedInformation(),
          hessian.augmentedInformation(), 1e-9));

  // perform elimination on jacobian
  Ordering ordering = list_of(1);
  GaussianConditional::shared_ptr expectedConditional;
  JacobianFactor::shared_ptr expectedFactor;
  boost::tie(expectedConditional, expectedFactor) = //
      jacobian.eliminate(ordering);

  // Eliminate
  GaussianConditional::shared_ptr actualConditional;
  HessianFactor::shared_ptr actualHessian;
  boost::tie(actualConditional, actualHessian) = //
      EliminateCholesky(gfg, ordering);

  EXPECT(assert_equal(*expectedConditional, *actualConditional, 1e-6));
  EXPECT_DOUBLES_EQUAL(expectedFactor->error(v), actualHessian->error(v), 1e-9);
  EXPECT(
      assert_equal(expectedFactor->augmentedInformation(),
          actualHessian->augmentedInformation(), 1e-9));
  EXPECT(assert_equal(HessianFactor(*expectedFactor), *actualHessian, 1e-6));
}
Example #20
/* ************************************************************************* */
TEST(GaussianFactorGraph, transposeMultiplication) {
  GaussianFactorGraph A = createSimpleGaussianFactorGraph();

  Errors e;
  e += Vector2(0.0, 0.0), Vector2(15.0, 0.0), Vector2(0.0, -5.0), Vector2(-7.5, -5.0);

  VectorValues expected;
  expected.insert(1, Vector2(-37.5, -50.0));
  expected.insert(2, Vector2(-150.0, 25.0));
  expected.insert(0, Vector2(187.5, 25.0));

  VectorValues actual = A.transposeMultiply(e);
  EXPECT(assert_equal(expected, actual));
}
Example #21
/* ************************************************************************* */
TEST( SubgraphPreconditioner, planarGraph )
{
  // Check planar graph construction
  GaussianFactorGraph A;
  VectorValues xtrue;
  boost::tie(A, xtrue) = planarGraph(3);
  LONGS_EQUAL(13,A.size());
  LONGS_EQUAL(9,xtrue.size());
  DOUBLES_EQUAL(0,error(A,xtrue),1e-9); // check zero error for xtrue

  // Check that xtrue is optimal
  GaussianBayesNet::shared_ptr R1 = GaussianSequentialSolver(A).eliminate();
  VectorValues actual = optimize(*R1);
  CHECK(assert_equal(xtrue,actual));
}
Example #22
TEST(LPInitSolver, infinite_loop_multi_var) {
  LP initchecker;
  Key X = symbol('X', 1);
  Key Y = symbol('Y', 1);
  Key Z = symbol('Z', 1);
  initchecker.cost = LinearCost(Z, kOne);  // min alpha
  initchecker.inequalities.push_back(
      LinearInequality(X, -2.0 * kOne, Y, -1.0 * kOne, Z, -1.0 * kOne, -2,
                       1));  //-2x-y-alpha <= -2
  initchecker.inequalities.push_back(
      LinearInequality(X, -1.0 * kOne, Y, 2.0 * kOne, Z, -1.0 * kOne, 6,
                       2));  // -x+2y-alpha <= 6
  initchecker.inequalities.push_back(LinearInequality(
      X, -1.0 * kOne, Z, -1.0 * kOne, 0, 3));  // -x - alpha <= 0
  initchecker.inequalities.push_back(LinearInequality(
      X, 1.0 * kOne, Z, -1.0 * kOne, 20, 4));  // x - alpha <= 20
  initchecker.inequalities.push_back(LinearInequality(
      Y, -1.0 * kOne, Z, -1.0 * kOne, 0, 5));  // -y - alpha <= 0
  LPSolver solver(initchecker);
  VectorValues starter;
  starter.insert(X, kZero);
  starter.insert(Y, kZero);
  starter.insert(Z, Vector::Constant(1, 2.0));
  VectorValues results, duals;
  boost::tie(results, duals) = solver.optimize(starter);
  VectorValues expected;
  expected.insert(X, Vector::Constant(1, 13.5));
  expected.insert(Y, Vector::Constant(1, 6.5));
  expected.insert(Z, Vector::Constant(1, -6.5));
  CHECK(assert_equal(results, expected, 1e-7));
}
Example #23
/* ************************************************************************* */
TEST(LPSolver, simpleTest1) {
  LP lp = simpleLP1();
  LPSolver lpSolver(lp);
  VectorValues init;
  init.insert(1, Vector::Zero(2));

  VectorValues x1 =
      lpSolver.buildWorkingGraph(InequalityFactorGraph(), init).optimize();
  VectorValues expected_x1;
  expected_x1.insert(1, Vector::Ones(2));
  CHECK(assert_equal(expected_x1, x1, 1e-10));

  VectorValues result, duals;
  boost::tie(result, duals) = lpSolver.optimize(init);
  VectorValues expectedResult;
  expectedResult.insert(1, Vector2(8. / 3., 2. / 3.));
  CHECK(assert_equal(expectedResult, result, 1e-10));
}
Example #24
/* ************************************************************************* */
TEST(GaussianFactorGraph, multiplyHessianAdd) {
  GaussianFactorGraph gfg = createSimpleGaussianFactorGraph();

  VectorValues x = map_list_of<Key, Vector>(0, Vector2(1, 2))(1, Vector2(3, 4))(2, Vector2(5, 6));

  VectorValues expected;
  expected.insert(0, Vector2(-450, -450));
  expected.insert(1, Vector2(0, 0));
  expected.insert(2, Vector2(950, 1050));

  VectorValues actual;
  gfg.multiplyHessianAdd(1.0, x, actual);
  EXPECT(assert_equal(expected, actual));

  // Call again with the same x: actual is not zeroed, so the result doubles
  gfg.multiplyHessianAdd(1.0, x, actual);
  EXPECT(assert_equal(2 * expected, actual));
}
Example #25
  /* ************************************************************************* */
  Values Values::retract(const VectorValues& delta) const
  {
    Values result;

    for(const_iterator key_value = begin(); key_value != end(); ++key_value) {
      VectorValues::const_iterator vector_item = delta.find(key_value->key);
      Key key = key_value->key;  // Non-const duplicate to deal with non-const insert argument
      if(vector_item != delta.end()) {
        const Vector& singleDelta = vector_item->second;
        Value* retractedValue(key_value->value.retract_(singleDelta)); // Retract
        result.values_.insert(key, retractedValue); // Add retracted result directly to result values
      } else {
        result.values_.insert(key, key_value->value.clone_()); // Add original version to result values
      }
    }

    return result;
  }
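A minimal usage sketch (hypothetical key and values, assuming 2D point variables whose retract is plain vector addition); keys missing from delta are cloned unchanged, as the else-branch above shows:

  Values values;
  values.insert(0, Point2(1.0, 2.0));

  VectorValues delta;
  delta.insert(0, Vector2(0.1, -0.1));

  Values updated = values.retract(delta);  // updated now holds Point2(1.1, 1.9) at key 0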
Example #26
/* ************************************************************************* */
TEST(HessianFactor, gradientAtZero)
{
  Matrix G11 = (Matrix(1, 1) << 1).finished();
  Matrix G12 = (Matrix(1, 2) << 0, 0).finished();
  Matrix G22 = (Matrix(2, 2) << 1, 0, 0, 1).finished();
  Vector g1 = (Vector(1) << -7).finished();
  Vector g2 = (Vector(2) << -8, -9).finished();
  double f = 194;

  HessianFactor factor(0, 1, G11, G12, g1, G22, g2, f);

  // test gradient at zero
  VectorValues expectedG = pair_list_of<Key, Vector>(0, -g1) (1, -g2);
  Matrix A; Vector b; boost::tie(A,b) = factor.jacobian();
  FastVector<Key> keys; keys += 0,1;
  EXPECT(assert_equal(-A.transpose()*b, expectedG.vector(keys)));
  VectorValues actualG = factor.gradientAtZero();
  EXPECT(assert_equal(expectedG, actualG));
}
Example #27
/* ************************************************************************* */
TEST(DoglegOptimizer, ComputeBlend) {
  // Create an arbitrary Bayes Net
  GaussianBayesNet gbn;
  gbn += GaussianConditional::shared_ptr(new GaussianConditional(
      0, Vector2(1.0,2.0), (Matrix(2, 2) << 3.0,4.0,0.0,6.0).finished(),
      3, (Matrix(2, 2) << 7.0,8.0,9.0,10.0).finished(),
      4, (Matrix(2, 2) << 11.0,12.0,13.0,14.0).finished()));
  gbn += GaussianConditional::shared_ptr(new GaussianConditional(
      1, Vector2(15.0,16.0), (Matrix(2, 2) << 17.0,18.0,0.0,20.0).finished(),
      2, (Matrix(2, 2) << 21.0,22.0,23.0,24.0).finished(),
      4, (Matrix(2, 2) << 25.0,26.0,27.0,28.0).finished()));
  gbn += GaussianConditional::shared_ptr(new GaussianConditional(
      2, Vector2(29.0,30.0), (Matrix(2, 2) << 31.0,32.0,0.0,34.0).finished(),
      3, (Matrix(2, 2) << 35.0,36.0,37.0,38.0).finished()));
  gbn += GaussianConditional::shared_ptr(new GaussianConditional(
      3, Vector2(39.0,40.0), (Matrix(2, 2) << 41.0,42.0,0.0,44.0).finished(),
      4, (Matrix(2, 2) << 45.0,46.0,47.0,48.0).finished()));
  gbn += GaussianConditional::shared_ptr(new GaussianConditional(
      4, Vector2(49.0,50.0), (Matrix(2, 2) << 51.0,52.0,0.0,54.0).finished()));

  // Compute steepest descent point
  VectorValues xu = gbn.optimizeGradientSearch();

  // Compute Newton's method point
  VectorValues xn = gbn.optimize();

  // The Newton's method point should be more "adventurous", i.e. larger, than the steepest descent point
  EXPECT(xu.vector().norm() < xn.vector().norm());

  // Compute blend
  double Delta = 1.5;
  VectorValues xb = DoglegOptimizerImpl::ComputeBlend(Delta, xu, xn);
  DOUBLES_EQUAL(Delta, xb.vector().norm(), 1e-10);
}
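The final assertion pins down what ComputeBlend returns: the point on the segment between the steepest-descent point xu and the Newton point xn whose norm equals the trust-region radius, i.e. the beta in [0, 1] with

\[ \lVert x_u + \beta\,(x_n - x_u) \rVert = \Delta. \]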
Example #28
TEST(LPInitSolver, infinite_loop_single_var) {
  LP initchecker;
  initchecker.cost = LinearCost(1, Vector3(0, 0, 1));  // min alpha
  initchecker.inequalities.push_back(
      LinearInequality(1, Vector3(-2, -1, -1), -2, 1));  //-2x-y-alpha <= -2
  initchecker.inequalities.push_back(
      LinearInequality(1, Vector3(-1, 2, -1), 6, 2));  // -x+2y-alpha <= 6
  initchecker.inequalities.push_back(
      LinearInequality(1, Vector3(-1, 0, -1), 0, 3));  // -x - alpha <= 0
  initchecker.inequalities.push_back(
      LinearInequality(1, Vector3(1, 0, -1), 20, 4));  // x - alpha <= 20
  initchecker.inequalities.push_back(
      LinearInequality(1, Vector3(0, -1, -1), 0, 5));  // -y - alpha <= 0
  LPSolver solver(initchecker);
  VectorValues starter;
  starter.insert(1, Vector3(0, 0, 2));
  VectorValues results, duals;
  boost::tie(results, duals) = solver.optimize(starter);
  VectorValues expected;
  expected.insert(1, Vector3(13.5, 6.5, -6.5));
  CHECK(assert_equal(results, expected, 1e-7));
}
Example #29
/* ************************************************************************* */
TEST(VectorValues, copyConstructor) {

  // insert, with out-of-order indices
  VectorValues original;
  original.insert(0, Vector_(1, 1.0));
  original.insert(1, Vector_(2, 2.0, 3.0));
  original.insert(5, Vector_(2, 6.0, 7.0));
  original.insert(2, Vector_(2, 4.0, 5.0));

  VectorValues actual(original);

  // Check dimensions
  LONGS_EQUAL(6, actual.size());
  LONGS_EQUAL(7, actual.dim());
  LONGS_EQUAL(1, actual.dim(0));
  LONGS_EQUAL(2, actual.dim(1));
  LONGS_EQUAL(2, actual.dim(2));
  LONGS_EQUAL(2, actual.dim(5));

  // Logic
  EXPECT(actual.exists(0));
  EXPECT(actual.exists(1));
  EXPECT(actual.exists(2));
  EXPECT(!actual.exists(3));
  EXPECT(!actual.exists(4));
  EXPECT(actual.exists(5));
  EXPECT(!actual.exists(6));

  // Check values
  EXPECT(assert_equal(Vector_(1, 1.0), actual[0]));
  EXPECT(assert_equal(Vector_(2, 2.0, 3.0), actual[1]));
  EXPECT(assert_equal(Vector_(2, 4.0, 5.0), actual[2]));
  EXPECT(assert_equal(Vector_(2, 6.0, 7.0), actual[5]));
  EXPECT(assert_equal(Vector_(7, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0), actual.vector()));

  // Check exceptions
  CHECK_EXCEPTION(actual.insert(1, Vector()), invalid_argument);
}
Example #30
/* ************************************************************************* */
TEST(HessianFactor, hessianDiagonal)
{
  Matrix G11 = (Matrix(1, 1) << 1).finished();
  Matrix G12 = (Matrix(1, 2) << 0, 0).finished();
  Matrix G22 = (Matrix(2, 2) << 1, 0, 0, 1).finished();
  Vector g1 = (Vector(1) << -7).finished();
  Vector g2 = (Vector(2) << -8, -9).finished();
  double f = 194;

  HessianFactor factor(0, 1, G11, G12, g1, G22, g2, f);

  // hessianDiagonal
  VectorValues expected;
  expected.insert(0, (Vector(1) << 1).finished());
  expected.insert(1, (Vector(2) << 1, 1).finished());
  EXPECT(assert_equal(expected, factor.hessianDiagonal()));

  // hessianBlockDiagonal
  map<Key,Matrix> actualBD = factor.hessianBlockDiagonal();
  LONGS_EQUAL(2,actualBD.size());
  EXPECT(assert_equal(G11,actualBD[0]));
  EXPECT(assert_equal(G22,actualBD[1]));
}
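For a single HessianFactor the two queries differ only in shape: hessianBlockDiagonal returns the diagonal blocks G11 and G22 themselves, while hessianDiagonal returns just their diagonals, stacked per key as (1) and (1, 1).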