void EdgeProjectXYZ2UVQ::linearizeOplus() {
  VertexSE3Expmap* vj = static_cast<VertexSE3Expmap*>(_vertices[1]);
  SE3Quat T(vj->estimate());

  VertexSBAPointXYZ* vi = static_cast<VertexSBAPointXYZ*>(_vertices[0]);
  Vector3d xyz = vi->estimate();
  Vector3d xyz_trans = T.map(xyz);          // point in the camera frame

  const double& x = xyz_trans[0];
  const double& y = xyz_trans[1];
  const double& z = xyz_trans[2];
  double z_sq = z * z;
  const double& Fx = vj->_focal_length(0);
  const double& Fy = vj->_focal_length(1);
  double dq_dz = -Fx / z_sq;                // derivative of the inverse-depth component
  double x_Fx_by_zsq = x * Fx / z_sq;
  double y_Fy_by_zsq = y * Fy / z_sq;

  Matrix3d R = T.rotation().toRotationMatrix();

  // Jacobian of the error w.r.t. the 3D point (chain rule through the rigid-body transform)
  _jacobianOplusXi.row(0) = -Fx / z * R.row(0) + x_Fx_by_zsq * R.row(2);
  _jacobianOplusXi.row(1) = -Fy / z * R.row(1) + y_Fy_by_zsq * R.row(2);
  _jacobianOplusXi.row(2) = -dq_dz * R.row(2);

  // Jacobian of the error w.r.t. the camera pose
  // (minimal se(3) increment: columns 0-2 rotation, columns 3-5 translation)
  _jacobianOplusXj(0,0) =  x * y / z_sq * Fx;
  _jacobianOplusXj(0,1) = -(1 + (x * x / z_sq)) * Fx;
  _jacobianOplusXj(0,2) =  y / z * Fx;
  _jacobianOplusXj(0,3) = -1. / z * Fx;
  _jacobianOplusXj(0,4) =  0;
  _jacobianOplusXj(0,5) =  x / z_sq * Fx;

  _jacobianOplusXj(1,0) =  (1 + y * y / z_sq) * Fy;
  _jacobianOplusXj(1,1) = -x * y / z_sq * Fy;
  _jacobianOplusXj(1,2) = -x / z * Fy;
  _jacobianOplusXj(1,3) =  0;
  _jacobianOplusXj(1,4) = -1. / z * Fy;
  _jacobianOplusXj(1,5) =  y / z_sq * Fy;

  _jacobianOplusXj(2,0) = -y * dq_dz;
  _jacobianOplusXj(2,1) =  x * dq_dz;
  _jacobianOplusXj(2,2) =  0;
  _jacobianOplusXj(2,3) =  0;
  _jacobianOplusXj(2,4) =  0;
  _jacobianOplusXj(2,5) = -dq_dz;

  // (debug) the analytic Jacobians can be compared against the numeric ones
  // obtained from BaseBinaryEdge<...>::linearizeOplus(); see the sketch below.
}
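// A minimal debugging sketch for the cross-check hinted at above (not part of the edge
// implementation): stash the analytic Jacobians, let the base class compute numeric ones,
// compare, then restore. The base-class template arguments are an assumption taken from
// the original debug comment and may differ in other g2o versions.
//
//   Matrix<double,3,3> Ji = _jacobianOplusXi;   // analytic point Jacobian
//   Matrix<double,3,6> Jj = _jacobianOplusXj;   // analytic pose Jacobian
//   BaseBinaryEdge<3, Vector3d, VertexPointXYZ, VertexSE3Expmap, false>::linearizeOplus();
//   std::cerr << "point Jacobian diff: " << (Ji - _jacobianOplusXi).norm() << "\n"
//             << "pose  Jacobian diff: " << (Jj - _jacobianOplusXj).norm() << std::endl;
//   _jacobianOplusXi = Ji;   // keep the analytic result for the solver
//   _jacobianOplusXj = Jj;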
void EdgeProjectPSI2UV::linearizeOplus() {
  VertexSBAPointXYZ* vpoint = static_cast<VertexSBAPointXYZ*>(_vertices[0]);
  Vector3d psi_a = vpoint->estimate();
  VertexSE3Expmap* vpose = static_cast<VertexSE3Expmap*>(_vertices[1]);
  SE3Quat T_cw = vpose->estimate();
  VertexSE3Expmap* vanchor = static_cast<VertexSE3Expmap*>(_vertices[2]);
  const CameraParameters* cam = static_cast<const CameraParameters*>(parameter(0));

  SE3Quat A_aw = vanchor->estimate();
  SE3Quat T_ca = T_cw * A_aw.inverse();
  Vector3d x_a = invert_depth(psi_a);
  Vector3d y = T_ca * x_a;
  Matrix<double,2,3> Jcam = d_proj_d_y(cam->focal_length, y);

  _jacobianOplus[0] = -Jcam * d_Tinvpsi_d_psi(T_ca, psi_a);
  _jacobianOplus[1] = -Jcam * d_expy_d_y(y);
  _jacobianOplus[2] = Jcam * T_ca.rotation().toRotationMatrix() * d_expy_d_y(x_a);
}
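// Context for the three Jacobian blocks above (a reading of the assumed conventions,
// not an authoritative reference): the point vertex stores the landmark in inverse-depth
// coordinates psi = (u, v, rho) relative to the anchor keyframe, and T_ca = T_cw * A_aw^-1
// maps anchor-frame points into the current camera. A helper consistent with the chain
// rule used above could look like this (unproject2d is an assumed helper name):
//
//   inline Vector3d unproject2d(const Vector2d& v) { return Vector3d(v(0), v(1), 1.0); }
//   inline Vector3d invert_depth(const Vector3d& psi) {
//     return unproject2d(psi.head<2>()) / psi[2];   // (u, v, rho) -> (u/rho, v/rho, 1/rho)
//   }
//
// _jacobianOplus[0] chains the camera Jacobian with d(T_ca * invert_depth(psi))/d psi,
// _jacobianOplus[1] differentiates w.r.t. a left-multiplied increment on the current pose,
// and _jacobianOplus[2] w.r.t. an increment on the anchor pose; perturbing the anchor enters
// through A_aw^-1, which flips the sign and introduces the extra rotation of T_ca.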
void EdgeProjectXYZ2UV::linearizeOplus() {
  VertexSE3Expmap* vj = static_cast<VertexSE3Expmap*>(_vertices[1]);
  SE3Quat T(vj->estimate());
  VertexSBAPointXYZ* vi = static_cast<VertexSBAPointXYZ*>(_vertices[0]);
  Vector3d xyz = vi->estimate();
  Vector3d xyz_trans = T.map(xyz);

  double x = xyz_trans[0];
  double y = xyz_trans[1];
  double z = xyz_trans[2];
  double z_2 = z * z;

  Matrix<double,2,3> tmp;
  tmp(0,0) = vj->_focal_length(0);
  tmp(0,1) = 0;
  tmp(0,2) = -x / z * vj->_focal_length(0);

  tmp(1,0) = 0;
  tmp(1,1) = vj->_focal_length(1);
  tmp(1,2) = -y / z * vj->_focal_length(1);

  _jacobianOplusXi = -1. / z * tmp * T.rotation().toRotationMatrix();

  _jacobianOplusXj(0,0) =  x * y / z_2 * vj->_focal_length(0);
  _jacobianOplusXj(0,1) = -(1 + (x * x / z_2)) * vj->_focal_length(0);
  _jacobianOplusXj(0,2) =  y / z * vj->_focal_length(0);
  _jacobianOplusXj(0,3) = -1. / z * vj->_focal_length(0);
  _jacobianOplusXj(0,4) =  0;
  _jacobianOplusXj(0,5) =  x / z_2 * vj->_focal_length(0);

  _jacobianOplusXj(1,0) =  (1 + y * y / z_2) * vj->_focal_length(1);
  _jacobianOplusXj(1,1) = -x * y / z_2 * vj->_focal_length(1);
  _jacobianOplusXj(1,2) = -x / z * vj->_focal_length(1);
  _jacobianOplusXj(1,3) =  0;
  _jacobianOplusXj(1,4) = -1. / z * vj->_focal_length(1);
  _jacobianOplusXj(1,5) =  y / z_2 * vj->_focal_length(1);
}
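// Sketch of the derivation behind EdgeProjectXYZ2UV::linearizeOplus() above (assuming
// the usual g2o error definition e = z_measured - cam_map(T * X_w)):
//
//   proj(p) = ( fx * p.x / p.z + cx,  fy * p.y / p.z + cy )
//
//   d proj / d p = 1/z * | fx   0   -fx*x/z |     <- this is (1/z) * tmp in the code
//                        |  0   fy  -fy*y/z |
//
//   d (T * X_w) / d X_w = R   (the rotation part of T)
//
// so _jacobianOplusXi = -(1/z) * tmp * R, with the leading minus sign coming from the
// error being "measurement minus prediction". The pose block _jacobianOplusXj chains the
// same camera Jacobian with the derivative of exp(xi) * p w.r.t. the minimal increment xi
// (columns 0-2: rotation, columns 3-5: translation).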
/**
 * \brief Jacobian for stereo projection
 */
void Edge_XYZ_VSC::linearizeOplus() {
  VertexSCam* vc = static_cast<VertexSCam*>(_vertices[1]);
  VertexSBAPointXYZ* vp = static_cast<VertexSBAPointXYZ*>(_vertices[0]);

  Vector4d pt, trans;
  pt.head<3>() = vp->estimate();
  pt(3) = 1.0;
  trans.head<3>() = vc->estimate().translation();
  trans(3) = 1.0;

  // first get the world point in camera coords
  Eigen::Matrix<double,3,1> pc = vc->w2n * pt;

  // Jacobians wrt camera parameters
  // set d(quat-x) values [ pz*dpx/dx - px*dpz/dx ] / pz^2
  double px = pc(0);
  double py = pc(1);
  double pz = pc(2);
  double ipz2 = 1.0 / (pz * pz);
  if (std::isnan(ipz2)) {
    std::cout << "[SetJac] infinite jac" << std::endl;
    std::abort();  // bail out cleanly instead of writing through a null pointer
  }

  double ipz2fx = ipz2 * vc->Kcam(0,0); // Fx
  double ipz2fy = ipz2 * vc->Kcam(1,1); // Fy
  double b = vc->baseline;              // stereo baseline

  Eigen::Matrix<double,3,1> pwt;

  // check for local vars
  pwt = (pt - trans).head<3>(); // transform translations, use differential rotation

  // dx
  Eigen::Matrix<double,3,1> dp = vc->dRdx * pwt; // dR'/dq * [pw - t]
  _jacobianOplusXj(0,3) = (pz*dp(0) - px*dp(2))*ipz2fx;
  _jacobianOplusXj(1,3) = (pz*dp(1) - py*dp(2))*ipz2fy;
  _jacobianOplusXj(2,3) = (pz*dp(0) - (px-b)*dp(2))*ipz2fx; // right image px
  // dy
  dp = vc->dRdy * pwt; // dR'/dq * [pw - t]
  _jacobianOplusXj(0,4) = (pz*dp(0) - px*dp(2))*ipz2fx;
  _jacobianOplusXj(1,4) = (pz*dp(1) - py*dp(2))*ipz2fy;
  _jacobianOplusXj(2,4) = (pz*dp(0) - (px-b)*dp(2))*ipz2fx; // right image px
  // dz
  dp = vc->dRdz * pwt; // dR'/dq * [pw - t]
  _jacobianOplusXj(0,5) = (pz*dp(0) - px*dp(2))*ipz2fx;
  _jacobianOplusXj(1,5) = (pz*dp(1) - py*dp(2))*ipz2fy;
  _jacobianOplusXj(2,5) = (pz*dp(0) - (px-b)*dp(2))*ipz2fx; // right image px

  // set d(t) values [ pz*dpx/dx - px*dpz/dx ] / pz^2
  dp = -vc->w2n.col(0); // dpc / dx
  _jacobianOplusXj(0,0) = (pz*dp(0) - px*dp(2))*ipz2fx;
  _jacobianOplusXj(1,0) = (pz*dp(1) - py*dp(2))*ipz2fy;
  _jacobianOplusXj(2,0) = (pz*dp(0) - (px-b)*dp(2))*ipz2fx; // right image px
  dp = -vc->w2n.col(1); // dpc / dy
  _jacobianOplusXj(0,1) = (pz*dp(0) - px*dp(2))*ipz2fx;
  _jacobianOplusXj(1,1) = (pz*dp(1) - py*dp(2))*ipz2fy;
  _jacobianOplusXj(2,1) = (pz*dp(0) - (px-b)*dp(2))*ipz2fx; // right image px
  dp = -vc->w2n.col(2); // dpc / dz
  _jacobianOplusXj(0,2) = (pz*dp(0) - px*dp(2))*ipz2fx;
  _jacobianOplusXj(1,2) = (pz*dp(1) - py*dp(2))*ipz2fy;
  _jacobianOplusXj(2,2) = (pz*dp(0) - (px-b)*dp(2))*ipz2fx; // right image px

  // Jacobians wrt point parameters
  // set d(t) values [ pz*dpx/dx - px*dpz/dx ] / pz^2
  dp = vc->w2n.col(0); // dpc / dx
  _jacobianOplusXi(0,0) = (pz*dp(0) - px*dp(2))*ipz2fx;
  _jacobianOplusXi(1,0) = (pz*dp(1) - py*dp(2))*ipz2fy;
  _jacobianOplusXi(2,0) = (pz*dp(0) - (px-b)*dp(2))*ipz2fx; // right image px
  dp = vc->w2n.col(1); // dpc / dy
  _jacobianOplusXi(0,1) = (pz*dp(0) - px*dp(2))*ipz2fx;
  _jacobianOplusXi(1,1) = (pz*dp(1) - py*dp(2))*ipz2fy;
  _jacobianOplusXi(2,1) = (pz*dp(0) - (px-b)*dp(2))*ipz2fx; // right image px
  dp = vc->w2n.col(2); // dpc / dz
  _jacobianOplusXi(0,2) = (pz*dp(0) - px*dp(2))*ipz2fx;
  _jacobianOplusXi(1,2) = (pz*dp(1) - py*dp(2))*ipz2fy;
  _jacobianOplusXi(2,2) = (pz*dp(0) - (px-b)*dp(2))*ipz2fx; // right image px
}
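// Note on the structure above (a reading of the code, not an authoritative spec):
// every entry follows the quotient rule d(f * px / pz) = f * (pz*dpx - px*dpz) / pz^2,
// with rows 0 and 1 being the left-image u and v, and row 2 the right-image u, which
// uses (px - b) instead of px because the right camera is displaced by the baseline b
// along x. vc->w2n is assumed to be the 3x4 world-to-camera transform and
// vc->dRdx / dRdy / dRdz the derivatives of its rotation w.r.t. the local angular
// increments, both precomputed by VertexSCam before linearization.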
int main(int argc, char** argv) {
  string inputFilename;
  string outputFilename;
  // command line parsing
  CommandArgs commandLineArguments;
  commandLineArguments.paramLeftOver("gm2dl-input", inputFilename, "", "gm2dl file which will be processed");
  commandLineArguments.paramLeftOver("gm2dl-output", outputFilename, "", "name of the output file");
  commandLineArguments.parseArgs(argc, argv);

  OptimizableGraph inputGraph;
  bool loadStatus = inputGraph.load(inputFilename.c_str());
  if (! loadStatus) {
    cerr << "Error while loading input data" << endl;
    return 1;
  }

  OptimizableGraph outputGraph;

  // process all the vertices first
  double fx = -1;
  double baseline = -1;
  bool firstCam = true;
  for (OptimizableGraph::VertexIDMap::const_iterator it = inputGraph.vertices().begin(); it != inputGraph.vertices().end(); ++it) {
    if (dynamic_cast<VertexCam*>(it->second)) {
      VertexCam* v = static_cast<VertexCam*>(it->second);
      if (firstCam) {
        // take intrinsics and baseline from the first camera vertex
        firstCam = false;
        g2o::ParameterCamera* camParams = new g2o::ParameterCamera;
        camParams->setId(0);
        const SBACam& c = v->estimate();
        baseline = c.baseline;
        fx = c.Kcam(0,0);
        camParams->setKcam(c.Kcam(0,0), c.Kcam(1,1), c.Kcam(0,2), c.Kcam(1,2));
        outputGraph.addParameter(camParams);
      }

      VertexSE3* ov = new VertexSE3;
      ov->setId(v->id());
      Eigen::Isometry3d p;
      p = v->estimate().rotation();
      p.translation() = v->estimate().translation();
      ov->setEstimate(p);
      if (! outputGraph.addVertex(ov)) {
        assert(0 && "Failure adding camera vertex");
      }
    }
    else if (dynamic_cast<VertexSBAPointXYZ*>(it->second)) {
      VertexSBAPointXYZ* v = static_cast<VertexSBAPointXYZ*>(it->second);

      VertexPointXYZ* ov = new VertexPointXYZ;
      ov->setId(v->id());
      ov->setEstimate(v->estimate());
      if (! outputGraph.addVertex(ov)) {
        assert(0 && "Failure adding point vertex");
      }
    }
  }

  // now convert the edges
  for (OptimizableGraph::EdgeSet::const_iterator it = inputGraph.edges().begin(); it != inputGraph.edges().end(); ++it) {
    if (dynamic_cast<EdgeProjectP2SC*>(*it)) {
      EdgeProjectP2SC* e = static_cast<EdgeProjectP2SC*>(*it);

      EdgeSE3PointXYZDisparity* oe = new EdgeSE3PointXYZDisparity;
      oe->vertices()[0] = outputGraph.vertex(e->vertices()[1]->id());
      oe->vertices()[1] = outputGraph.vertex(e->vertices()[0]->id());

      double kx = e->measurement().x();
      double ky = e->measurement().y();
      double disparity = kx - e->measurement()(2);

      oe->setMeasurement(Eigen::Vector3d(kx, ky, disparity / (fx * baseline)));
      oe->setInformation(e->information()); // TODO convert information matrix
      oe->setParameterId(0, 0);
      if (! outputGraph.addEdge(oe)) {
        assert(0 && "error adding edge");
      }
    }
  }

  cout << "Vertices in/out:\t" << inputGraph.vertices().size() << " " << outputGraph.vertices().size() << endl;
  cout << "Edges in/out:\t" << inputGraph.edges().size() << " " << outputGraph.edges().size() << endl;

  cout << "Writing output ... " << flush;
  outputGraph.save(outputFilename.c_str());
  cout << "done." << endl;
  return 0;
}
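// Example invocation (the binary name is hypothetical; the two positional arguments
// correspond to the paramLeftOver entries "gm2dl-input" and "gm2dl-output"):
//
//   ./convert_sba_to_slam3d sba_input.g2o slam3d_output.g2o
//
// The tool reads an SBA graph (VertexCam / VertexSBAPointXYZ / EdgeProjectP2SC),
// takes the intrinsics and baseline from the first camera vertex, and writes an
// equivalent slam3d graph (VertexSE3 / VertexPointXYZ / EdgeSE3PointXYZDisparity),
// normalizing the disparity by fx * baseline as done in the edge loop above.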