Example #1
    Box(Eigen::Vector4d &center, Eigen::Vector4d& u, Eigen::Vector4d &v, 
	Eigen::Vector4d &w, unsigned int id, unsigned int matId, 
	double uWidth, double vWidth, double wWidth)
	: QuadricCollection(center, id, matId), u(u), v(v), w(w)  {
	BOOST_ASSERT_MSG(u[3] == 0 && v[3] == 0 && w[3] == 0,
			 "u, v, w must have fourth coordinate equal to zero!");

	// Prepare rotation matrix
	Eigen::Matrix4d R;
	R.row(0) = u;
	R.row(1) =  v;
	R.row(2) =  w;
	R.row(3) = Eigen::Vector4d(0, 0, 0, 1);
	
	/* Make all the planes forming the box (centered at the origin).
	   Plane normals are unit vectors pointing in the positive/negative
	   x, y, z directions. A point p on the plane with unit normal
	   n = (a, b, c) at distance d from the origin satisfies n.p - d = 0,
	   or equivalently p^T Q p = 0 in homogeneous coordinates, where
	   Q = |0   0   0   a/2|
	       |0   0   0   b/2|
	       |0   0   0   c/2|
	       |a/2 b/2 c/2  -d|
	   We define the planes w.r.t. the x, y, z axes, then rotate them to u, v, w.
	 */
	Eigen::Matrix4d posWPlane, negWPlane, posUPlane, negUPlane,
	    posVPlane, negVPlane;
	posWPlane = negWPlane = posUPlane = negUPlane = posVPlane =  negVPlane
	    = Eigen::Matrix4d::Zero();
	posUPlane.row(3) = posUPlane.col(3) = Eigen::Vector4d(0.5, 0, 0, -uWidth/2.0);
	negUPlane.row(3) = negUPlane.col(3) = Eigen::Vector4d(-0.5, 0, 0, -uWidth/2.0);

	posVPlane.row(3) = posVPlane.col(3) = Eigen::Vector4d(0, 0.5, 0, -vWidth/2.0);
	negVPlane.row(3) = negVPlane.col(3) = Eigen::Vector4d(0, -0.5, 0, -vWidth/2.0);

	posWPlane.row(3) = posWPlane.col(3) = Eigen::Vector4d(0, 0, 0.5, -wWidth/2.0);
	negWPlane.row(3) = negWPlane.col(3) = Eigen::Vector4d(0, 0, -0.5, -wWidth/2.0);

	addQuadric(R.transpose() * posWPlane * R);
	addQuadric(R.transpose() * negWPlane * R);
	addQuadric(R.transpose() * posUPlane * R);
	addQuadric(R.transpose() * negUPlane * R);
	addQuadric(R.transpose() * posVPlane * R);
	addQuadric(R.transpose() * negVPlane * R);
	
    }
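The plane-as-quadric encoding described in the comment above can be checked on its own. The following sketch uses plain Eigen only and is independent of the Box and QuadricCollection classes; makePlaneQuadric is a hypothetical helper introduced here for illustration. It builds Q for the positive-u face of a box of width 2 (matching how posUPlane fills row 3 and column 3 with (a/2, b/2, c/2, -d)) and verifies that a point on that face satisfies p^T Q p = 0.

#include <Eigen/Dense>
#include <cassert>
#include <cmath>

// Plane with unit normal n = (a, b, c) at distance d from the origin, encoded
// so that p^T Q p = 0 for homogeneous points p = (x, y, z, 1) on the plane.
Eigen::Matrix4d makePlaneQuadric(const Eigen::Vector3d& n, double d) {
    Eigen::Matrix4d Q = Eigen::Matrix4d::Zero();
    Q.block<3, 1>(0, 3) = 0.5 * n;              // last column: n/2
    Q.block<1, 3>(3, 0) = 0.5 * n.transpose();  // last row: n/2
    Q(3, 3) = -d;
    return Q;
}

int main() {
    // Positive-u face of a box with uWidth = 2: normal (1, 0, 0), distance 1.
    Eigen::Matrix4d Q = makePlaneQuadric(Eigen::Vector3d(1, 0, 0), 1.0);
    Eigen::Vector4d p(1.0, 0.3, -0.7, 1.0);     // any point with x == 1 is on the face
    assert(std::abs(p.dot(Q * p)) < 1e-12);
    return 0;
}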
Example #2
const CPoint3DCAMERA CMiniVisionToolbox::getPointStereoLinearTriangulationSVDDLT( const cv::Point2d& p_ptPointLEFT, const cv::Point2d& p_ptPointRIGHT, const Eigen::Matrix< double, 3, 4 >& p_matProjectionLEFT, const Eigen::Matrix< double, 3, 4 >& p_matProjectionRIGHT )
{
    //ds A matrix for system: A*X=0
    Eigen::Matrix4d matAHomogeneous;

    matAHomogeneous.row(0) = p_ptPointLEFT.x*p_matProjectionLEFT.row(2)-p_matProjectionLEFT.row(0);
    matAHomogeneous.row(1) = p_ptPointLEFT.y*p_matProjectionLEFT.row(2)-p_matProjectionLEFT.row(1);
    matAHomogeneous.row(2) = p_ptPointRIGHT.x*p_matProjectionRIGHT.row(2)-p_matProjectionRIGHT.row(0);
    matAHomogeneous.row(3) = p_ptPointRIGHT.y*p_matProjectionRIGHT.row(2)-p_matProjectionRIGHT.row(1);

    //ds solve min ||A*x|| subject to ||x|| = 1 (homogeneous least squares)
    const Eigen::JacobiSVD< Eigen::Matrix4d > matSVD( matAHomogeneous, Eigen::ComputeFullU | Eigen::ComputeFullV );

    //ds solution x is the last column of V
    const Eigen::Vector4d vecX( matSVD.matrixV( ).col( 3 ) );

    return vecX.head( 3 )/vecX(3);
}
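For reference, the same SVD/DLT triangulation can be exercised without the CMiniVisionToolbox wrapper. The sketch below is a standalone re-implementation using plain Eigen and synthetic, rectified projection matrices; triangulateDLT is a hypothetical free function, not part of the original class. With exact synthetic data, A has an exact null vector, so the recovered point matches the input up to numerical precision.

#include <Eigen/Dense>
#include <iostream>

Eigen::Vector3d triangulateDLT(const Eigen::Vector2d& xL, const Eigen::Vector2d& xR,
                               const Eigen::Matrix<double, 3, 4>& PL,
                               const Eigen::Matrix<double, 3, 4>& PR) {
    // Build the 4x4 DLT system A*X = 0 from both views.
    Eigen::Matrix4d A;
    A.row(0) = xL.x() * PL.row(2) - PL.row(0);
    A.row(1) = xL.y() * PL.row(2) - PL.row(1);
    A.row(2) = xR.x() * PR.row(2) - PR.row(0);
    A.row(3) = xR.y() * PR.row(2) - PR.row(1);
    Eigen::JacobiSVD<Eigen::Matrix4d> svd(A, Eigen::ComputeFullV);
    Eigen::Vector4d X = svd.matrixV().col(3);   // null-space direction of A
    return X.head(3) / X(3);                    // dehomogenize
}

int main() {
    // Synthetic rectified stereo pair: identity intrinsics, baseline 0.1 along x.
    Eigen::Matrix<double, 3, 4> PL, PR;
    PL << 1, 0, 0, 0,   0, 1, 0, 0,   0, 0, 1, 0;
    PR = PL;  PR(0, 3) = -0.1;
    const Eigen::Vector3d point(0.2, -0.1, 2.0);
    const Eigen::Vector4d ph = point.homogeneous();
    const Eigen::Vector3d pL = PL * ph, pR = PR * ph;
    const Eigen::Vector3d rec = triangulateDLT(pL.hnormalized(), pR.hnormalized(), PL, PR);
    std::cout << "recovered: " << rec.transpose() << std::endl;  // ~ (0.2, -0.1, 2.0)
    return 0;
}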
Example #3
void Data4Viewer::drawGlobalView()
{
	_pKinect->_pRGBCamera->setGLProjectionMatrix(0.1f, 100.f);

	glMatrixMode(GL_MODELVIEW);
	Eigen::Affine3d tmp; tmp.setIdentity();
	// Negate the y and z rows, i.e. load diag(1, -1, -1, 1): the usual flip
	// from a y-down/z-forward camera frame to OpenGL's y-up/z-backward frame.
	Eigen::Matrix4d mMat;
	mMat.row(0) = tmp.matrix().row(0);
	mMat.row(1) = -tmp.matrix().row(1);
	mMat.row(2) = -tmp.matrix().row(2);
	mMat.row(3) = tmp.matrix().row(3);

	glLoadMatrixd(mMat.data());
	GpuMat rgb(_pKinect->_cvmRGB);
	_pKinect->_pRGBCamera->renderCameraInLocal(rgb, _pGL.get(), false, NULL, 0.2f, true); //render in model coordinate
	//cout<<("drawRGBView");
	return;
}
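As a side note, the row negations above amount to loading diag(1, -1, -1, 1) as the modelview matrix. The snippet below is only an illustration (plain Eigen, no OpenGL context, not part of Data4Viewer) that builds the same matrix both ways and checks they agree.

#include <Eigen/Dense>
#include <cassert>

int main() {
    // Same construction as drawGlobalView(), starting from the identity.
    Eigen::Matrix4d mMat = Eigen::Matrix4d::Identity();
    mMat.row(1) *= -1.0;
    mMat.row(2) *= -1.0;

    // Equivalent direct form: diag(1, -1, -1, 1).
    Eigen::Matrix4d flip = Eigen::Vector4d(1, -1, -1, 1).asDiagonal();
    assert(mMat.isApprox(flip));

    // Eigen's default column-major storage is what glLoadMatrixd(mMat.data())
    // expects, so the matrix can be handed to OpenGL without transposing.
    return 0;
}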
Example #4
Eigen::Matrix4d createHomogeneousTransformMatrix(tf::StampedTransform transform)
{
	tf::Point p = transform.getOrigin();
	tf::Quaternion q = transform.getRotation();
	tf::Matrix3x3 R1(q);
	Eigen::Matrix3d R2;
	tf::matrixTFToEigen(R1, R2);
	ROS_INFO_STREAM("R2:\n"<<R2);

	Eigen::Matrix4d T;
	T.block<3, 3>(0, 0) = R2;
	T.block<3, 1>(0, 3) << p.x(), p.y(), p.z();
	T.row(3) << 0, 0, 0, 1;
	return T;
}
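If the tf dependency is not wanted, an equivalent 4x4 homogeneous transform can be assembled directly from an Eigen quaternion and translation. Below is a minimal ROS-free sketch with made-up values; homogeneousFromQuatAndTranslation is a hypothetical helper.

#include <Eigen/Dense>
#include <Eigen/Geometry>
#include <cmath>
#include <iostream>

Eigen::Matrix4d homogeneousFromQuatAndTranslation(const Eigen::Quaterniond& q,
                                                  const Eigen::Vector3d& t) {
    Eigen::Matrix4d T = Eigen::Matrix4d::Identity();
    T.block<3, 3>(0, 0) = q.normalized().toRotationMatrix();
    T.block<3, 1>(0, 3) = t;
    return T;  // bottom row is already (0, 0, 0, 1)
}

int main() {
    // 90 degree rotation about z, translated by (1, 2, 3).
    Eigen::Quaterniond q(Eigen::AngleAxisd(M_PI / 2.0, Eigen::Vector3d::UnitZ()));
    std::cout << homogeneousFromQuatAndTranslation(q, Eigen::Vector3d(1, 2, 3)) << std::endl;
    return 0;
}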
Example #5
void
pcl::visualization::Camera::computeViewMatrix(Eigen::Matrix4d& view_mat) const
{
  // Constructs the view matrix from the camera position, view-up vector, and
  // the point it is looking at; based on the gluLookAt reference code:
  // http://www.opengl.org/wiki/GluLookAt_code
  Eigen::Vector3d focal_point (focal[0], focal[1], focal[2]);
  Eigen::Vector3d posv        (pos[0]  , pos[1]  , pos[2]);
  Eigen::Vector3d up          (view[0] , view[1] , view[2]);

  Eigen::Vector3d zAxis = (focal_point - posv).normalized ();
  Eigen::Vector3d xAxis = zAxis.cross (up).normalized ();
  // make sure the y-axis is orthogonal to the other two
  Eigen::Vector3d yAxis = xAxis.cross (zAxis);

  view_mat.block <1, 3> (0, 0) = xAxis;
  view_mat.block <1, 3> (1, 0) = yAxis;
  view_mat.block <1, 3> (2, 0) = -zAxis;
  view_mat.row (3) << 0, 0, 0, 1;

  view_mat.block <3, 1> (0, 3) = view_mat.topLeftCorner<3, 3> () * (-posv);
}
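A short usage sketch for computeViewMatrix: fill in the camera position, focal point, and up vector, then check that the resulting view matrix maps the camera position to the origin. This assumes PCL's visualization module; the header path below may differ between PCL versions.

#include <pcl/visualization/common/common.h>  // pcl::visualization::Camera
#include <Eigen/Dense>
#include <iostream>

int main() {
    pcl::visualization::Camera cam;
    cam.pos[0] = 0.0;   cam.pos[1] = 0.0;   cam.pos[2] = 5.0;   // camera position
    cam.focal[0] = 0.0; cam.focal[1] = 0.0; cam.focal[2] = 0.0; // look-at point
    cam.view[0] = 0.0;  cam.view[1] = 1.0;  cam.view[2] = 0.0;  // up vector

    Eigen::Matrix4d view_mat;
    cam.computeViewMatrix(view_mat);

    // The view matrix should map the homogeneous camera position to the origin.
    std::cout << (view_mat * Eigen::Vector4d(0, 0, 5, 1)).transpose() << std::endl;
    return 0;
}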
Example #6
int main(int argc, char** argv)
{
  // initialize ROS
  ros::init(argc, argv, "find_grasps");
  ros::NodeHandle node("~");
  
  GraspLocalizer::Parameters params;
  
  // camera transforms (poses)
  Eigen::Matrix4d base_tf, sqrt_tf;
  base_tf <<  0, 0.445417, 0.895323, 0.215, 
              1, 0, 0, -0.015, 
              0, 0.895323, -0.445417, 0.23, 
              0, 0, 0, 1;
  sqrt_tf <<  0.9366, -0.0162, 0.3500, -0.2863, 
              0.0151, 0.9999, 0.0058, 0.0058, 
              -0.3501, -0.0002, 0.9367, 0.0554, 
              0, 0, 0, 1;
  params.cam_tf_left_ = base_tf * sqrt_tf.inverse();
  params.cam_tf_right_ = base_tf * sqrt_tf;

  // read ROS parameters
  std::string cloud_topic;
  std::string cloud_frame;
  std::string svm_file_name;
  std::vector<double> workspace;
  std::vector<double> camera_pose;
  int cloud_type;
  bool parallel;

  node.param("parallel", parallel, true);
  node.param("cloud_topic", cloud_topic, CLOUD_TOPIC);
  node.param("cloud_frame", cloud_frame, CLOUD_FRAME);
  node.param("cloud_type", cloud_type, CLOUD_TYPE);
  node.param("svm_file_name", svm_file_name, SVM_FILE_NAME);
  node.param("num_threads", params.num_threads_, NUM_THREADS);
  node.param("num_samples", params.num_samples_, NUM_SAMPLES); 
  node.param("num_clouds", params.num_clouds_, NUM_CLOUDS);

  if (parallel) {
      double finger_width, hand_outer_diameter, hand_depth;
      node.param("finger_width", finger_width, FINGER_WIDTH);
      node.param("hand_outer_diameter", hand_outer_diameter, HAND_OUTER_DIAMETER);
      node.param("hand_depth", hand_depth, HAND_DEPTH);
      params.finger_hand_ = new ParallelHand(finger_width, hand_outer_diameter, hand_depth);
  }
  //TODO else

  node.param("init_bite", params.init_bite_, INIT_BITE);
  node.param("hand_height", params.hand_height_, HAND_HEIGHT);
  node.param("min_inliers", params.min_inliers_, MIN_HANDLE_INLIERS);
  node.getParam("workspace", workspace);
  node.getParam("camera_pose", camera_pose);
  node.param("plotting", params.plotting_mode_, 0);
  node.param("marker_lifetime", params.marker_lifetime_, 0.0);
  
  Eigen::Matrix4d R;
  for (int i = 0; i < R.rows(); i++)
    R.row(i) << camera_pose[i*R.cols()], camera_pose[i*R.cols() + 1],
                camera_pose[i*R.cols() + 2], camera_pose[i*R.cols() + 3];
    
  Eigen::VectorXd ws(6);
  ws << workspace[0], workspace[1], workspace[2], workspace[3], workspace[4], workspace[5];
  params.workspace_ = ws;
  
  std::cout << "-- Parameters --\n";
  std::cout << " Input\n";
  std::cout << "  cloud_topic: " << cloud_topic << "\n";
  std::cout << "  cloud_frame: " << cloud_frame << "\n";
  std::cout << "  cloud_type: " << CLOUD_TYPES[cloud_type] << "\n";
  std::cout << " Hand Search\n";  
  std::cout << "  workspace: " << ws.transpose() << "\n";
  std::cout << "  num_samples: " << params.num_samples_ << "\n";
  std::cout << "  num_threads: " << params.num_threads_ << "\n";
  std::cout << "  num_clouds: " << params.num_clouds_ << "\n";  
  std::cout << "  camera pose:\n" << R << std::endl;
  std::cout << " Robot Hand Model\n";
  //TODO: Make FingerGrasp printable.
 // std::cout << "  finger_width: " << params.finger_width_ << "\n";
 // std::cout << "  hand_outer_diameter: " << params.hand_outer_diameter_ << "\n";
 // std::cout << "  hand_depth: " << params.finger_width_ << "\n";
 // std::cout << "  init_bite: " << params.finger_width_ << "\n";
 // std::cout << "  hand_height: " << params.finger_width_ << "\n";
  std::cout << " Antipodal Grasps Prediction\n";
  std::cout << "  svm_file_name: " << svm_file_name << "\n";
  std::cout << " Handle Search\n";
  std::cout << "  min_inliers: " << params.min_inliers_ << "\n";
  std::cout << " Visualization\n";
  std::cout << "  plot_mode: " << PLOT_MODES[params.plotting_mode_] << "\n";
  std::cout << "  marker_lifetime: " << params.marker_lifetime_ << "\n";
  
  GraspLocalizer loc(node, cloud_topic, cloud_frame, cloud_type, svm_file_name, params);
  loc.localizeGrasps();
  
  return 0;
}
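Aside: the row-by-row copy of camera_pose into R above can also be written with an Eigen::Map over the flat, row-major parameter vector. Below is a standalone sketch, independent of ROS, which (like the loop) assumes the vector holds exactly 16 values.

#include <Eigen/Dense>
#include <iostream>
#include <vector>

int main() {
    std::vector<double> camera_pose = {
        1, 0, 0, 0.1,
        0, 1, 0, 0.2,
        0, 0, 1, 0.3,
        0, 0, 0, 1.0 };

    // Interpret the flat vector as a row-major 4x4 matrix (copied into
    // Eigen's default column-major storage on assignment).
    Eigen::Matrix4d R =
        Eigen::Map<const Eigen::Matrix<double, 4, 4, Eigen::RowMajor> >(camera_pose.data());

    std::cout << R << std::endl;
    return 0;
}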