bool SparseOptimizerIncremental::initSolver(int dimension, int batchEveryN) { //cerr << __PRETTY_FUNCTION__ << endl; slamDimension = dimension; if (dimension == 3) { setAlgorithm(createSolver("fix3_2_cholmod")); OptimizationAlgorithmGaussNewton* gaussNewton = dynamic_cast<OptimizationAlgorithmGaussNewton*>(solver()); assert(gaussNewton); BlockSolver<BlockSolverTraits<3, 2> >* bs = dynamic_cast<BlockSolver<BlockSolverTraits<3, 2> >*>(gaussNewton->solver()); assert(bs && "Unable to get internal block solver"); LinearSolverCholmodOnline<Matrix3d>* s = dynamic_cast<LinearSolverCholmodOnline<Matrix3d>*>(bs->linearSolver()); bs->setAdditionalVectorSpace(300); bs->setSchur(false); _solverInterface = s; _underlyingSolver = bs; } else { setAlgorithm(createSolver("fix6_3_cholmod")); OptimizationAlgorithmGaussNewton* gaussNewton = dynamic_cast<OptimizationAlgorithmGaussNewton*>(solver()); assert(gaussNewton); BlockSolver<BlockSolverTraits<6, 3> >* bs = dynamic_cast<BlockSolver<BlockSolverTraits<6, 3> >*>(gaussNewton->solver()); assert(bs && "Unable to get internal block solver"); LinearSolverCholmodOnline<Matrix<double, 6, 6> >* s = dynamic_cast<LinearSolverCholmodOnline<Matrix<double, 6, 6> >*>(bs->linearSolver()); bs->setAdditionalVectorSpace(600); bs->setSchur(false); _solverInterface = s; _underlyingSolver = bs; } _solverInterface->cmember = &_cmember; _solverInterface->batchEveryN = batchEveryN; if (! solver()) { cerr << "Error allocating solver. Allocating CHOLMOD solver failed!" << endl; return false; } return true; }
/**
 * @brief Construct an empty keyframe graph backed by a g2o optimizer.
 *
 * Builds the solver chain (CSparse linear solver -> 7/3 block solver ->
 * Levenberg-Marquardt algorithm) and hands it to the graph; g2o takes
 * ownership of the chain, so the raw `new` allocations are intentional.
 * All bookkeeping counters start at zero.
 */
KeyFrameGraph::KeyFrameGraph()
    : nextEdgeId(0)
{
  typedef g2o::BlockSolver_7_3 PoseBlockSolver;
  typedef g2o::LinearSolverCSparse<PoseBlockSolver::PoseMatrixType> CSparseSolver;
  //typedef g2o::LinearSolverPCG<PoseBlockSolver::PoseMatrixType> CSparseSolver;

  CSparseSolver* linearSolver = new CSparseSolver();
  PoseBlockSolver* poseSolver = new PoseBlockSolver(linearSolver);
  g2o::OptimizationAlgorithmLevenberg* lmAlgorithm =
      new g2o::OptimizationAlgorithmLevenberg(poseSolver);

  graph.setAlgorithm(lmAlgorithm);
  graph.setVerbose(false); // printOptimizationInfo

  // Enable debug output on every level of the solver chain.
  linearSolver->setWriteDebug(true);
  poseSolver->setWriteDebug(true);
  lmAlgorithm->setWriteDebug(true);

  totalPoints = 0;
  totalEdges = 0;
  totalVertices = 0;
}
bool SparseOptimizerIncremental::initSolver(int dimension, int batchEveryN) { //cerr << __PRETTY_FUNCTION__ << endl; slamDimension = dimension; if (dimension == 3) { setSolver(createSolver(this, "fix3_2_cholmod")); BlockSolver<BlockSolverTraits<3, 2> >* bs = dynamic_cast<BlockSolver<BlockSolverTraits<3, 2> >*>(solver()); LinearSolverCholmodOnline<Matrix3d>* s = dynamic_cast<LinearSolverCholmodOnline<Matrix3d>*>(bs->linearSolver()); _solverInterface = s; } else { setSolver(createSolver(this, "fix6_3_cholmod")); BlockSolver<BlockSolverTraits<6, 3> >* bs = dynamic_cast<BlockSolver<BlockSolverTraits<6, 3> >*>(solver()); LinearSolverCholmodOnline<Matrix<double, 6, 6> >* s = dynamic_cast<LinearSolverCholmodOnline<Matrix<double, 6, 6> >*>(bs->linearSolver()); _solverInterface = s; } _solverInterface->cmember = &_cmember; _solverInterface->batchEveryN = batchEveryN; solver()->setSchur(false); if (! solver()) { cerr << "Error allocating solver. Allocating CHOLMOD solver failed!" << endl; return false; } return true; }