Example #1
double compareT(const Eigen::Isometry3d &a, const Eigen::Isometry3d &b,
		const Eigen::VectorXd &weight)
{
	// Stack translation and quaternion coefficients (x, y, z, w) into 7D
	// vectors and return the squared norm of their weighted difference.
	Eigen::Quaterniond qa(a.rotation());
	Eigen::Quaterniond qb(b.rotation());
	Eigen::Vector3d pa = a.translation();
	Eigen::Vector3d pb = b.translation();
	Eigen::VectorXd va(7), vb(7), verr(7), vScaled(7);
	va << pa, qa.x(), qa.y(), qa.z(), qa.w();
	vb << pb, qb.x(), qb.y(), qb.z(), qb.w();
	verr = vb - va;
	vScaled = weight.cwiseProduct(verr);
	return vScaled.squaredNorm();
}
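A minimal usage sketch for compareT (all values invented). Note that the componentwise quaternion difference treats q and -q as different even though they represent the same rotation, so callers may want to align quaternion signs first:

#include <Eigen/Geometry>
#include <iostream>

int main()
{
	Eigen::Isometry3d a = Eigen::Isometry3d::Identity();
	Eigen::Isometry3d b = Eigen::Isometry3d::Identity();
	b.translate(Eigen::Vector3d(0.1, 0.0, 0.0));
	b.rotate(Eigen::AngleAxisd(0.05, Eigen::Vector3d::UnitZ()));

	// weight translation twice as strongly as the quaternion terms
	Eigen::VectorXd weight(7);
	weight << 1.0, 1.0, 1.0, 0.5, 0.5, 0.5, 0.5; // x, y, z | qx, qy, qz, qw

	std::cout << "weighted squared error: " << compareT(a, b, weight) << "\n";
}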
void VideoIMUFusion::RunningData::handleVideoTrackerReport(
    const OSVR_TimeValue &timestamp, const OSVR_PoseReport &report) {
    Eigen::Isometry3d roomPose = takeCameraPoseToRoom(report.pose);

#ifdef OSVR_FPE
    FPExceptionEnabler fpe;
#endif
    Eigen::Quaterniond orientation(roomPose.rotation());
    auto oriChange = state().getQuaternion().angularDistance(orientation);
    if (std::abs(oriChange) > M_PI / 2.) {
        OSVR_DEV_VERBOSE("Throwing out a bad video pose");
        return;
    }
    if (preReport(timestamp)) {
        m_cameraMeasPos.setMeasurement(roomPose.translation());
        osvr::kalman::correct(state(), processModel(), m_cameraMeasPos);
        m_cameraMeasOri.setMeasurement(orientation);
        osvr::kalman::correct(state(), processModel(), m_cameraMeasOri);

#if 0
    OSVR_DEV_VERBOSE(
        "State: " << state().stateVector().transpose() << "\n"
                  << "Quat: "
                  << state().getQuaternion().coeffs().transpose()
                  << "\n"
                     "Error:\n"
                  << state().errorCovariance());
#endif
    }
}
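The interesting part above is the orientation gate that discards measurements implying an implausibly large jump. A self-contained sketch of the same check, stripped of the OSVR state types (function name hypothetical):

#include <Eigen/Geometry>
#include <cmath>

// Reject a measured orientation more than 90 degrees away from the current
// estimate; angularDistance() returns the angle, in radians, of the shortest
// rotation taking one quaternion to the other.
bool isOrientationJumpTooLarge(const Eigen::Quaterniond &current,
                               const Eigen::Quaterniond &measured) {
    return std::abs(current.angularDistance(measured)) > M_PI / 2.;
}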
Example #3
pcl::PointCloud<pcl::PointXYZ>::Ptr Conversions::toPointCloud(
    const Eigen::Isometry3d &T,
    const image_geometry::PinholeCameraModel &cam,
    const cv::Mat &depth_img)
{
    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZ>());
    cloud->header.frame_id = "cam";
    cloud->is_dense = false;
    cloud->height = depth_img.rows;
    cloud->width = depth_img.cols;
    cloud->sensor_origin_ = Eigen::Vector4f( T.translation()(0), T.translation()(1), T.translation()(2), 1.f );
    cloud->sensor_orientation_ = Eigen::Quaternionf(T.rotation().cast<float>());
    cloud->points.resize( cloud->height * cloud->width );

    size_t idx = 0;
    const float* depthdata = reinterpret_cast<const float*>(depth_img.data); // assumes CV_32FC1
    for(int v = 0; v < depth_img.rows; ++v) {
        for(int u = 0; u < depth_img.cols; ++u) {
            pcl::PointXYZ& p = cloud->points[ idx ]; ++idx;

            float d = (*depthdata++);

            if (d > 0 && !std::isnan(d)) {
                p.z = d;
                p.x = (u - cam.cx()) * d / cam.fx();
                p.y = (v - cam.cy()) * d / cam.fy();
            } else {
                p.x = std::numeric_limits<float>::quiet_NaN();
                p.y = std::numeric_limits<float>::quiet_NaN();
                p.z = std::numeric_limits<float>::quiet_NaN();
            }
        }
    }
    return cloud;
}
Example #4
void print_Isometry3d(const Eigen::Isometry3d &pose, std::stringstream &ss){
  Eigen::Vector3d t(pose.translation());
  Eigen::Quaterniond r(pose.rotation());
  ss <<t[0]<<", "<<t[1]<<", "<<t[2]<<" | " 
       <<r.w()<<", "<<r.x()<<", "<<r.y()<<", "<<r.z() ;
  //  std::cout << ss.str() << "q\n";
}
Example #5
pcl::PointCloud<pcl::PointXYZRGBA>::Ptr Conversions::toPointCloudColor(const Eigen::Isometry3d &T, DepthImageDataPtr depth_data)
{
    pcl::PointCloud<pcl::PointXYZRGBA>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZRGBA>());
    cv::Mat depth_img;
    cv::Mat color_img;

    cv_bridge::CvImagePtr depth_bridge;
    cv_bridge::CvImagePtr rgb_bridge;
    try {
        depth_bridge = cv_bridge::toCvCopy(depth_data->depth_image_, sensor_msgs::image_encodings::TYPE_32FC1);
        depth_img = depth_bridge->image;
        rgb_bridge = cv_bridge::toCvCopy(depth_data->color_image_, sensor_msgs::image_encodings::BGR8);
        color_img = rgb_bridge->image;
    } catch (cv_bridge::Exception& e) {
        ROS_ERROR("cv_bridge exception: %s",e.what());
        return cloud;
    }

    // Get camera info for 3D point estimation.
    image_geometry::PinholeCameraModel cam = depth_data->camera_model_;

    cloud->header.frame_id = depth_data->sensor_frame_;
    cloud->is_dense = false;
    cloud->height = depth_img.rows;
    cloud->width = depth_img.cols;
    cloud->sensor_origin_ = Eigen::Vector4f( T.translation()(0), T.translation()(1), T.translation()(2), 1.f );
    cloud->sensor_orientation_ = Eigen::Quaternionf(T.rotation().cast<float>());
    cloud->points.resize( cloud->height * cloud->width );

    size_t idx = 0;
    const float* depthdata = reinterpret_cast<const float*>(depth_img.data); // assumes CV_32FC1
    for(int v = 0; v < depth_img.rows; ++v) {
        for(int u = 0; u < depth_img.cols; ++u) {
            pcl::PointXYZRGBA& p = cloud->points[ idx ]; ++idx;

            float d = (*depthdata++);

            if (d > 0 && !std::isnan(d)/* && p.z <= 7.5*/) {
                p.z = d;
                p.x = (u - cam.cx()) * d / cam.fx();
                p.y = (v - cam.cy()) * d / cam.fy();

                cv::Point3_<uchar>* rgb = color_img.ptr<cv::Point3_<uchar> >(v,u);
                p.b = rgb->x;
                p.g = rgb->y;
                p.r = rgb->z;
                p.a = 255;
            } else {
                p.x = std::numeric_limits<float>::quiet_NaN();
                p.y = std::numeric_limits<float>::quiet_NaN();
                p.z = std::numeric_limits<float>::quiet_NaN();
                p.rgb = 0;
            }
        }
    }

    return cloud;
}
Example #6
static Eigen::Affine3d toAffine(const Eigen::Isometry3d& pose)
{
  Eigen::Affine3d p(pose.rotation());
  p.translation() = pose.translation();

  return p;
}
static inline void
print_isometry(const Eigen::Isometry3d & iso)
{
  const Eigen::Vector3d & t = iso.translation();
  Eigen::Vector3d rpy = _quat_to_roll_pitch_yaw(Eigen::Quaterniond(iso.rotation()))*180.0/M_PI;
  fprintf(stderr, "trans:(% 6.3f % 6.3f % 6.3f) rot:(% 6.3f % 6.3f % 6.3f)",t(0),t(1),t(2),rpy(0),rpy(1),rpy(2));
  //    dbg("rot:(% 6.3f % 6.3f % 6.3f)",rpy(0),rpy(1),rpy(2));
}
Example #8
std::string print_Isometry3d(const Eigen::Isometry3d &pose){
  Eigen::Vector3d t(pose.translation());
  Eigen::Quaterniond r(pose.rotation());
  
  std::stringstream ss;
  ss <<t[0]<<", "<<t[1]<<", "<<t[2]<<", " 
       <<r.w()<<", "<<r.x()<<", "<<r.y()<<", "<<r.z() ;
  return ss.str();
}
Example #9
  // converts from a SE(3) Isometry transform to a 6D vector
  inline Eigen::Matrix<double, 6, 1> fromIsometry(const Eigen::Isometry3d & T){
    Eigen::Matrix<double, 6, 1> v;
    v.block<3,1>(0,0) = T.translation();

    Eigen::Quaterniond q(T.rotation());
    q.normalize();
    // keep only the vector part; the scalar part w is implied by the
    // unit-norm constraint (and assumed non-negative)
    v.block<3,1>(3,0) = q.vec();

    return v;
  }
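A hedged sketch of the inverse mapping (not part of the original source): it recovers the rotation only if the stored quaternion had a non-negative scalar part, since q.vec() alone cannot distinguish q from -q.

  inline Eigen::Isometry3d toIsometry(const Eigen::Matrix<double, 6, 1> & v){
    Eigen::Isometry3d T = Eigen::Isometry3d::Identity();
    T.translation() = v.block<3,1>(0,0);

    Eigen::Vector3d vec = v.block<3,1>(3,0);
    // recover w from the unit-norm constraint, guarding against rounding
    double w2 = 1.0 - vec.squaredNorm();
    Eigen::Quaterniond q(w2 > 0.0 ? std::sqrt(w2) : 0.0,
                         vec.x(), vec.y(), vec.z());
    q.normalize();
    T.linear() = q.toRotationMatrix();

    return T;
  }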
Example #10
void print_Isometry3d(const Eigen::Isometry3d &pose, std::stringstream &ss){
  Eigen::Vector3d t(pose.translation());
  Eigen::Quaterniond r(pose.rotation());
  double rpy[3];
  quat_to_euler(r, rpy[0], rpy[1], rpy[2]);
  
  ss <<t[0]<<", "<<t[1]<<", "<<t[2]<<" | " 
       <<r.w()<<", "<<r.x()<<", "<<r.y()<<", "<<r.z() << " | RPY "
       << rpy[0] <<", "<< rpy[1] <<", "<< rpy[2];
}
Example #11
std::string
isometryToString(const Eigen::Isometry3d& m)
{
  char result[80];
  memset(result, 0, sizeof(result));
  Eigen::Vector3d xyz = m.translation();
  Eigen::Vector3d rpy = m.rotation().eulerAngles(0, 1, 2);
  snprintf(result, 79, "%6.2f %6.2f %6.2f %6.2f %6.2f %6.2f",
      xyz(0), xyz(1), xyz(2),
      rpy(0) * 180/M_PI, rpy(1) * 180/M_PI, rpy(2) * 180/M_PI);
  return std::string(result);
}
Example #12
std::string print_Isometry3d(const Eigen::Isometry3d &pose){
  Eigen::Vector3d t(pose.translation());
  Eigen::Quaterniond r(pose.rotation());
  double rpy[3];
  quat_to_euler(r, rpy[0], rpy[1], rpy[2]);
  
  std::stringstream ss;
  ss <<t[0]<<", "<<t[1]<<", "<<t[2]<<", " 
       <<r.w()<<", "<<r.x()<<", "<<r.y()<<", "<<r.z() << ", "
       << rpy[0] <<", "<< rpy[1] <<", "<< rpy[2];
  return ss.str();
}
bot_core::pose_t getPoseAsBotPose(const Eigen::Isometry3d &pose, int64_t utime){
  bot_core::pose_t pose_msg;
  pose_msg.utime =   utime;
  pose_msg.pos[0] = pose.translation().x();
  pose_msg.pos[1] = pose.translation().y();
  pose_msg.pos[2] = pose.translation().z();  
  Eigen::Quaterniond r_x(pose.rotation());
  pose_msg.orientation[0] =  r_x.w();  
  pose_msg.orientation[1] =  r_x.x();  
  pose_msg.orientation[2] =  r_x.y();  
  pose_msg.orientation[3] =  r_x.z();  
  return pose_msg;
}
Example #14
OdomTransPtr RGBDVisOdometry::Isometry3DToOdomTrans(const Eigen::Isometry3d &info)
{
  Eigen::Vector3d xyz = info.translation();
  Eigen::Vector3d rpy = info.rotation().eulerAngles(0, 1, 2);

  OdomTransPtr odomInfo(new OdomTrans);
  odomInfo->x = xyz(0);
  odomInfo->y = xyz(1);
  odomInfo->z = xyz(2);
  odomInfo->alpha = rpy(0);
  odomInfo->beta = rpy(1);
  odomInfo->gamma = rpy(2);
  return odomInfo;
}
Example #15
void compute_velocity(const Eigen::Vector3d& v_parent,
                      const Eigen::Vector3d& w_parent,
                      const Eigen::Vector3d& v_rel,
                      const Eigen::Vector3d& w_rel,
                      const Eigen::Vector3d& offset,
                      const Eigen::Isometry3d& tf_parent,
                      Eigen::Vector3d& v_child,
                      Eigen::Vector3d& w_child)
{
  const Eigen::Matrix3d& R = tf_parent.rotation();

  v_child = v_parent + R*v_rel + w_parent.cross(R*offset);

  w_child = w_parent + R*w_rel;
}
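A quick usage sketch (all numbers invented): a child frame offset 1 m along the parent's y-axis while the parent spins about z, so the child picks up the w_parent x (R*offset) term:

#include <Eigen/Geometry>
#include <iostream>

int main()
{
  Eigen::Isometry3d tf_parent = Eigen::Isometry3d::Identity();
  Eigen::Vector3d v_parent(1, 0, 0), w_parent(0, 0, 0.5);
  Eigen::Vector3d v_rel = Eigen::Vector3d::Zero();
  Eigen::Vector3d w_rel = Eigen::Vector3d::Zero();
  Eigen::Vector3d offset(0, 1, 0); // child origin expressed in the parent frame

  Eigen::Vector3d v_child, w_child;
  compute_velocity(v_parent, w_parent, v_rel, w_rel, offset,
                   tf_parent, v_child, w_child);
  std::cout << v_child.transpose() << "\n"; // 0.5 0 0
  std::cout << w_child.transpose() << "\n"; // equals w_parent here
}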
Example #16
void App::doWork(){

  std::vector<float> xA;
  std::vector<float> yA;
  readCSVFile(cl_cfg_.filenameA, xA, yA);
  std::cout << xA.size() << " points in File A\n";
  lidarOdom_->doOdometry(xA, yA, xA.size(), 0);

  // Match second scan without a prior
  std::string i;
  std::cout << "Match B: Continue? ";
  std::cin >> i;

  std::vector<float> xB;
  std::vector<float> yB;
  readCSVFile(cl_cfg_.filenameB, xB, yB);
  std::cout << xB.size() << " points in File B\n";
  lidarOdom_->doOdometry(xB, yB, xB.size(), 1);

  // Match third scan giving a prior rotation for the heading
  // this alignment would otherwise fail:
  cout << "Match C: Continue? ";
  cin >> i;

  std::vector<float> xC;
  std::vector<float> yC;
  readCSVFile(cl_cfg_.filenameC, xC, yC);
  std::cout << xC.size() << " points in File C\n";

  ScanTransform prior;
  prior.x = 0;
  prior.y = 0;
  prior.theta = 1.7;
  lidarOdom_->doOdometry(xC, yC, xC.size(), 2, &prior);

  // 2. Determine the body position using the LIDAR motion estimate:
  Eigen::Isometry3d pose = lidarOdom_->getCurrentPose();
  Eigen::Quaterniond orientation(pose.rotation());

  Eigen::Vector3d rpy = orientation.matrix().eulerAngles(0,1,2);

  std::cout << "\n";
  std::cout << "x,y,yaw (m,m,deg): "<< pose.translation().x() << ", " << pose.translation().y()
            << ", "
            << rpy[2]*180/M_PI << "\n";
}
Example #17
void joints2frames::publishPose(const Eigen::Isometry3d &pose, int64_t utime, const std::string &channel){
  if (!shouldPublish(utime, channel) )
    return; 
  
  bot_core::pose_t pose_msg;
  pose_msg.utime =   utime;
  pose_msg.pos[0] = pose.translation().x();
  pose_msg.pos[1] = pose.translation().y();
  pose_msg.pos[2] = pose.translation().z();  
  Eigen::Quaterniond r_x(pose.rotation());
  pose_msg.orientation[0] =  r_x.w();  
  pose_msg.orientation[1] =  r_x.x();  
  pose_msg.orientation[2] =  r_x.y();  
  pose_msg.orientation[3] =  r_x.z();  
  lcm_->publish( channel, &pose_msg);
}
Example #18
void joints2frames::publishRigidTransform(const Eigen::Isometry3d &pose, int64_t utime, const std::string &channel){
  if (!shouldPublish(utime, channel) )
    return; 

  bot_core::rigid_transform_t tf;
  tf.utime = utime;
  tf.trans[0] = pose.translation().x();
  tf.trans[1] = pose.translation().y();
  tf.trans[2] = pose.translation().z();
  Eigen::Quaterniond quat(pose.rotation());
  tf.quat[0] = quat.w();
  tf.quat[1] = quat.x();
  tf.quat[2] = quat.y();
  tf.quat[3] = quat.z();
  lcm_->publish(channel, &tf);    
}
Example #19
crazyflie_t::webcam_pos_t encodeWebcamPos(Eigen::Isometry3d const & frame) 
{
    Eigen::Vector3d t(frame.translation());
    Eigen::Quaterniond r(frame.rotation());
    Eigen::Vector3d rpy = quat_to_rpy(r);

    crazyflie_t::webcam_pos_t msg;
    msg.x = frame.translation().x();
    msg.y = frame.translation().y();
    msg.z = frame.translation().z();

    msg.roll = rpy(0);
    msg.pitch = rpy(1);
    msg.yaw = rpy(2);

    return msg;
}
Example #20
crazytags::rigid_transform_t encodeLCMFrame(Eigen::Isometry3d const & frame) 
{
    Eigen::Vector3d t(frame.translation());
    Eigen::Quaterniond r(frame.rotation());

    crazytags::rigid_transform_t msg;
    msg.quat[0] = r.w();
    msg.quat[1] = r.x();
    msg.quat[2] = r.y();
    msg.quat[3] = r.z();

    msg.trans[0] = t[0];
    msg.trans[1] = t[1];
    msg.trans[2] = t[2];

    return msg;
}
inline
cv::Mat cvtIsometry_egn2ocv(const Eigen::Isometry3d& egn_o)
{
    Eigen::Matrix3d eigenRotation = egn_o.rotation();
    Eigen::Matrix<double,3,1> eigenTranslation = egn_o.translation();

    cv::Mat R, t;
    eigen2cv(eigenRotation, R);
    eigen2cv(eigenTranslation, t);
    t = t.reshape(1,3);

    cv::Mat ocv_o = cv::Mat::eye(4,4,CV_64FC1);
    R.copyTo(ocv_o(cv::Rect(0,0,3,3)));
    t.copyTo(ocv_o(cv::Rect(3,0,1,3)));

    return ocv_o;
}
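For completeness, a sketch of the reverse conversion, assuming the same opencv2/core/eigen.hpp helpers and a 4x4 CV_64FC1 input (function name invented):

inline
Eigen::Isometry3d cvtIsometry_ocv2egn(const cv::Mat& ocv_o)
{
    Eigen::Matrix4d m;
    cv::cv2eigen(ocv_o, m); // 4x4 CV_64FC1 -> Eigen::Matrix4d

    Eigen::Isometry3d egn_o = Eigen::Isometry3d::Identity();
    egn_o.linear() = m.block<3,3>(0,0);      // rotation
    egn_o.translation() = m.block<3,1>(0,3); // translation
    return egn_o;
}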
Example #22
Eigen::Isometry3d SmoothEstimatePropagator::SmoothPropagateAction::
  exponencialInterpolation(const Eigen::Isometry3d& from, const Eigen::Isometry3d& to, double step) const
{
  Eigen::Isometry3d res;

  double maxdist = maxDistance-2;

  // step goes from 1 to maxDistance; we need x from 0 to 1, varying linearly.
  double x = 1 - ((maxdist - (step-1))/maxdist);
  // alpha in [0, inf) describes the steepness of the exponential ramp
  double alpha = 50;
  // exponential ramp from 0 to 1
  double exp_ramp = 1 - (x/(1+alpha*(1-x)));

  // use quaternions and slerp to interpolate the rotation between the two
  // isometries; interpolate the translation linearly
  res.linear() = (Eigen::Quaterniond(from.rotation()).slerp(exp_ramp, Eigen::Quaterniond(to.rotation()))).toRotationMatrix();
  res.translation() = (1 - exp_ramp) * from.translation() + exp_ramp * to.translation();

  return res;
}
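The last two assignments are the generic recipe for blending two isometries: slerp the rotations, lerp the translations. As a standalone sketch (hypothetical free function, t in [0, 1]):

Eigen::Isometry3d interpolatePose(const Eigen::Isometry3d& from,
                                  const Eigen::Isometry3d& to, double t)
{
  Eigen::Isometry3d res = Eigen::Isometry3d::Identity();
  Eigen::Quaterniond qa(from.rotation()), qb(to.rotation());
  res.linear() = qa.slerp(t, qb).toRotationMatrix();
  res.translation() = (1 - t) * from.translation() + t * to.translation();
  return res;
}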
Example #23
void compute_acceleration(const Eigen::Vector3d& a_parent,
                          const Eigen::Vector3d& alpha_parent,
                          const Eigen::Vector3d& w_parent,
                          const Eigen::Vector3d& a_rel,
                          const Eigen::Vector3d& alpha_rel,
                          const Eigen::Vector3d& v_rel,
                          const Eigen::Vector3d& w_rel,
                          const Eigen::Vector3d& offset,
                          const Eigen::Isometry3d& tf_parent,
                          Eigen::Vector3d& a_child,
                          Eigen::Vector3d& alpha_child)
{
  const Eigen::Matrix3d& R = tf_parent.rotation();

  a_child = a_parent + R*a_rel
          + alpha_parent.cross(R*offset)
          + 2*w_parent.cross(R*v_rel)
          + w_parent.cross(w_parent.cross(R*offset));

  alpha_child = alpha_parent + R*alpha_rel + w_parent.cross(R*w_rel);
}
Example #24
void VoEstimator::publishPose(int64_t utime, const std::string &channel, const Eigen::Isometry3d &pose,
  const Eigen::Vector3d &vel_lin, const Eigen::Vector3d &vel_ang){
  Eigen::Quaterniond r(pose.rotation());
  bot_core::pose_t pose_msg;
  pose_msg.utime =   utime;
  pose_msg.pos[0] = pose.translation().x();
  pose_msg.pos[1] = pose.translation().y();
  pose_msg.pos[2] = pose.translation().z();
  pose_msg.orientation[0] =  r.w();
  pose_msg.orientation[1] =  r.x();
  pose_msg.orientation[2] =  r.y();
  pose_msg.orientation[3] =  r.z();
  pose_msg.vel[0] = vel_lin(0);
  pose_msg.vel[1] = vel_lin(1);
  pose_msg.vel[2] = vel_lin(2);
  pose_msg.rotation_rate[0] = vel_ang(0);
  pose_msg.rotation_rate[1] = vel_ang(1);
  pose_msg.rotation_rate[2] = vel_ang(2);
  pose_msg.accel[0]=0; // not estimated or filled in
  pose_msg.accel[1]=0;
  pose_msg.accel[2]=0;
  lcm_->publish( channel, &pose_msg);
}
Example #25
bool poseEstimationDirect ( const vector< Measurement >& measurements, cv::Mat* gray, Eigen::Matrix3f& K, Eigen::Isometry3d& Tcw )
{
    // initialize g2o
    typedef g2o::BlockSolver<g2o::BlockSolverTraits<6,1>> DirectBlock;  // the optimized vertex (camera pose) is 6x1
    DirectBlock::LinearSolverType* linearSolver = new g2o::LinearSolverDense< DirectBlock::PoseMatrixType > ();
    DirectBlock* solver_ptr = new DirectBlock ( linearSolver );
    // g2o::OptimizationAlgorithmGaussNewton* solver = new g2o::OptimizationAlgorithmGaussNewton( solver_ptr ); // Gauss-Newton
    g2o::OptimizationAlgorithmLevenberg* solver = new g2o::OptimizationAlgorithmLevenberg ( solver_ptr ); // Levenberg-Marquardt
    g2o::SparseOptimizer optimizer;  
    optimizer.setAlgorithm ( solver );
    optimizer.setVerbose( true );

    // add the pose vertex
    g2o::VertexSE3Expmap* pose = new g2o::VertexSE3Expmap();
    pose->setEstimate ( g2o::SE3Quat ( Tcw.rotation(), Tcw.translation() ) ); // rotation matrix and translation vector
    pose->setId ( 0 );
    optimizer.addVertex ( pose );

    // add one unary edge per measurement
    int id=1;
    for ( const Measurement& m: measurements )
    {
        EdgeSE3ProjectDirect* edge = new EdgeSE3ProjectDirect (
            m.pos_world, // 3D position
            K ( 0,0 ), K ( 1,1 ), K ( 0,2 ), K ( 1,2 ), gray // camera intrinsics and grayscale image
        );
        edge->setVertex ( 0, pose );
        edge->setMeasurement ( m.grayscale ); // the measurement is the observed grayscale value
        edge->setInformation ( Eigen::Matrix<double,1,1>::Identity() ); // information (weight) matrix
        edge->setId ( id++ );
        optimizer.addEdge ( edge );
    }
    cout<<"边的数量 edges in graph: "<<optimizer.edges().size() <<endl;
    optimizer.initializeOptimization();//优化初始化
    optimizer.optimize ( 30 );//最大优化次数
    Tcw = pose->estimate();// 变换矩阵
}
Example #26
Eigen::Matrix3d eigenRotation() const
{ return camera_transform.rotation(); }
Example #27
Eigen::Vector2d isometry2distance(const Eigen::Isometry3d& t) {
  return Eigen::Vector2d(t.translation().norm(),
                         fabs(Eigen::AngleAxisd(t.rotation()).angle()));
}
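Usage sketch: the (distance, angle) pair is normally evaluated on a relative transform, e.g. between consecutive poses (variable names invented):

Eigen::Isometry3d delta = prev_pose.inverse() * curr_pose; // relative motion
Eigen::Vector2d d = isometry2distance(delta);
// d(0): meters translated, d(1): radians rotated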
Example #28
  bool setChannels(const std::string& iInputChannel,
                   const std::string& iOutputChannel) {
    if (mSubscription != NULL) {
      mLcm->unsubscribe(mSubscription);
    }

    mInputChannel = iInputChannel;
    mOutputChannel = iOutputChannel;
    BotCamTrans* inputCamTrans =
      bot_param_get_new_camtrans(mBotWrapper->getBotParam(),
                                 mInputChannel.c_str());
    if (inputCamTrans == NULL) {
      std::cout << "error: cannot find camera " << mInputChannel << std::endl;
      return false;
    }
    BotCamTrans* outputCamTrans =
      bot_param_get_new_camtrans(mBotWrapper->getBotParam(),
                                 mOutputChannel.c_str());
    if (outputCamTrans == NULL) {
      std::cout << "error: cannot find camera " << mOutputChannel << std::endl;
      return false;
    }

    int inputWidth = bot_camtrans_get_width(inputCamTrans);
    int inputHeight = bot_camtrans_get_height(inputCamTrans);
    mOutputWidth = bot_camtrans_get_width(outputCamTrans);
    mOutputHeight = bot_camtrans_get_height(outputCamTrans);

    // precompute warp field
    mWarpFieldIntX.resize(mOutputWidth*mOutputHeight);
    mWarpFieldIntY.resize(mWarpFieldIntX.size());
    mWarpFieldFrac00.resize(mWarpFieldIntX.size());
    mWarpFieldFrac01.resize(mWarpFieldIntX.size());
    mWarpFieldFrac10.resize(mWarpFieldIntX.size());
    mWarpFieldFrac11.resize(mWarpFieldIntX.size());
    Eigen::Isometry3d outputToInput;
    if (!mBotWrapper->getTransform(mOutputChannel, mInputChannel,
                                   outputToInput)) {
      std::cout << "error: cannot get transform from " << mOutputChannel <<
        " to " << mInputChannel << std::endl;
      return false;
    }
    Eigen::Matrix3d rotation = outputToInput.rotation();

    Eigen::Vector3d ray;
    for (int i = 0, pos = 0; i < mOutputHeight; ++i) {
      for (int j = 0; j < mOutputWidth; ++j, ++pos) {
        mWarpFieldIntX[pos] = -1;
        if (0 != bot_camtrans_unproject_pixel(outputCamTrans, j, i,
                                              ray.data())) {
          continue;
        }
        ray = rotation*ray;
        double pix[3];
        if (0 != bot_camtrans_project_point(inputCamTrans, ray.data(), pix)) {
          continue;
        }
        if ((pix[2] < 0) ||
            (pix[0] < 0) || (pix[0] >= inputWidth-1) ||
            (pix[1] < 0) || (pix[1] >= inputHeight-1)) {
          continue;
        }
        mWarpFieldIntX[pos] = (int)pix[0];
        mWarpFieldIntY[pos] = (int)pix[1];
        double fracX = pix[0] - mWarpFieldIntX[pos];
        double fracY = pix[1] - mWarpFieldIntY[pos];
        mWarpFieldFrac00[pos] = (1-fracX)*(1-fracY);
        mWarpFieldFrac01[pos] = (1-fracX)*fracY;
        mWarpFieldFrac10[pos] = fracX*(1-fracY);
        mWarpFieldFrac11[pos] = fracX*fracY;
      }
    }

    mLcm->subscribe(mInputChannel, &ImageWarper::onImage, this);
    return true;
  }
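For context, a hedged sketch of how such a precomputed warp field is typically consumed per output pixel. The member names match the ones filled in above, but the image layout (8-bit single channel with the given stride) is an assumption:

  uint8_t warpPixel(const uint8_t* input, int inputStride, int pos) const {
    int x = mWarpFieldIntX[pos];
    if (x < 0) return 0; // no valid mapping for this output pixel
    int y = mWarpFieldIntY[pos];
    const uint8_t* p = input + y*inputStride + x;
    // bilinear blend of the four neighboring input pixels
    return (uint8_t)(mWarpFieldFrac00[pos]*p[0] +
                     mWarpFieldFrac10[pos]*p[1] +
                     mWarpFieldFrac01[pos]*p[inputStride] +
                     mWarpFieldFrac11[pos]*p[inputStride+1] + 0.5);
  }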