void FiducialsNode::location_cb(int id, double x, double y, double z,
    double bearing) {
    ROS_INFO("location_announce:id=%d x=%f y=%f bearing=%f",
      id, x, y, bearing * 180. / 3.1415926);

    visualization_msgs::Marker marker = createMarker(position_namespace, id);
    ros::Time now = marker.header.stamp;

    marker.type = visualization_msgs::Marker::ARROW;
    marker.action = visualization_msgs::Marker::ADD;

    marker.pose = scale_position(x, y, z, bearing);

    marker.scale.x = 0.2 / scale;
    marker.scale.y = 0.05 / scale;
    marker.scale.z = 0.05 / scale;

    marker.color = position_color;

    marker.lifetime = ros::Duration();

    marker_pub->publish(marker);

    // TODO: subtract out odometry position, and publish transform from
    //  map to odom
    double tf_x = marker.pose.position.x;
    double tf_y = marker.pose.position.y;
    double tf_yaw = bearing;

    // publish a transform based on the position
    if( use_odom ) {
      // if we're using odometry, look up the odom transform and subtract it
      //  from our position so that we can publish a map->odom transform
      //  such that map->odom->base_link reports the correct position
      std::string tf_err;
      if( tf_buffer.canTransform(pose_frame, odom_frame, now,
            ros::Duration(0.1), &tf_err ) ) {
        // get odometry position from TF
        tf2::Quaternion tf_quat;
        tf_quat.setRPY(0.0, 0.0, tf_yaw);

        tf2::Transform pose(tf_quat, tf2::Vector3(tf_x, tf_y, 0));

        // look up camera transform if we can
        if( last_camera_frame.length() > 0 ) {
          if( tf_buffer.canTransform(pose_frame, last_camera_frame, now,
                ros::Duration(0.1), &tf_err) ) {
            geometry_msgs::TransformStamped camera_tf;
            camera_tf = tf_buffer.lookupTransform(pose_frame,
                                                    last_camera_frame, now);
            tf2::Transform camera = msg_to_tf(camera_tf);
            pose = pose * camera.inverse();
          } else {
            ROS_ERROR("Cannot look up transform from %s to %s: %s",
                pose_frame.c_str(), last_camera_frame.c_str(), tf_err.c_str());
          }
        }

        geometry_msgs::TransformStamped odom;
        odom = tf_buffer.lookupTransform(odom_frame, pose_frame, now);
        tf2::Transform odom_tf = msg_to_tf(odom);

        // M = C * O
        // C^-1 * M = O
        // C^-1 = O * M^-1
        tf2::Transform odom_correction = (odom_tf * pose.inverse()).inverse();

        geometry_msgs::TransformStamped transform;
        tf2::Vector3 odom_correction_v = odom_correction.getOrigin();
        transform.transform.translation.x = odom_correction_v.getX();
        transform.transform.translation.y = odom_correction_v.getY();
        transform.transform.translation.z = odom_correction_v.getZ();

        tf2::Quaternion odom_correction_q = odom_correction.getRotation();
        transform.transform.rotation.x = odom_correction_q.getX();
        transform.transform.rotation.y = odom_correction_q.getY();
        transform.transform.rotation.z = odom_correction_q.getZ();
        transform.transform.rotation.w = odom_correction_q.getW();

        transform.header.stamp = now;
        transform.header.frame_id = world_frame;
        transform.child_frame_id = odom_frame;
        //tf2::transformTF2ToMsg(odom_correction, transform, now, world_frame,
            //odom_frame);

        if (publish_tf)
            tf_pub.sendTransform(transform);
      } else {
        ROS_ERROR("Can't look up base transform from %s to %s: %s",
            pose_frame.c_str(),
            odom_frame.c_str(),
            tf_err.c_str());
      }
    } else {
      // we're publishing absolute position
      geometry_msgs::TransformStamped transform;
      transform.header.stamp = now;
      transform.header.frame_id = world_frame;
      transform.child_frame_id = pose_frame;

      transform.transform.translation.x = tf_x;
      transform.transform.translation.y = tf_y;
      transform.transform.translation.z = 0.0;
      transform.transform.rotation = tf::createQuaternionMsgFromYaw(tf_yaw);

      if (publish_tf)
          tf_pub.sendTransform(transform);
    }
}
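
The msg_to_tf() helper called above is project-local and not shown; a minimal sketch of what it presumably does (unpack a geometry_msgs transform into a tf2::Transform) could look like this — the real implementation may differ:

#include <tf2/LinearMath/Transform.h>
#include <geometry_msgs/TransformStamped.h>

// Hypothetical helper: convert a TransformStamped message into a tf2 transform.
static tf2::Transform msg_to_tf(const geometry_msgs::TransformStamped& msg) {
    tf2::Quaternion q(msg.transform.rotation.x, msg.transform.rotation.y,
                      msg.transform.rotation.z, msg.transform.rotation.w);
    tf2::Vector3 v(msg.transform.translation.x, msg.transform.translation.y,
                   msg.transform.translation.z);
    return tf2::Transform(q, v);
}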
Example #2
void TFDisplay::updateFrame(FrameInfo* frame)
{
  tf::Stamped<tf::Pose> pose( btTransform( btQuaternion( 0, 0, 0 ), btVector3( 0, 0, 0 ) ), ros::Time(), frame->name_ );

  if (tf_->canTransform(fixed_frame_, frame->name_, ros::Time()))
  {
    try
    {
      tf_->transformPose( fixed_frame_, pose, pose );
    }
    catch(tf::TransformException& e)
    {
      ROS_ERROR( "Error transforming frame '%s' to frame '%s'\n", frame->name_.c_str(), fixed_frame_.c_str() );
    }
  }

  frame->position_ = Ogre::Vector3( pose.getOrigin().x(), pose.getOrigin().y(), pose.getOrigin().z() );
  frame->robot_space_position_ = frame->position_;
  robotToOgre( frame->position_ );

  btQuaternion quat;
  pose.getBasis().getRotation( quat );
  frame->orientation_ = Ogre::Quaternion( quat.w(), quat.x(), quat.y(), quat.z() );
  frame->robot_space_orientation_ = frame->orientation_;
  robotToOgre( frame->orientation_ );

  frame->axes_->setPosition( frame->position_ );
  frame->axes_->setOrientation( frame->orientation_ );

  frame->name_node_->setPosition( frame->position_ );

  frame->position_property_->changed();
  frame->orientation_property_->changed();

  std::string old_parent = frame->parent_;
  frame->parent_.clear();
  bool has_parent = tf_->getParent( frame->name_, ros::Time(), frame->parent_ );
  if ( has_parent )
  {
    if ( !frame->tree_property_ || old_parent != frame->parent_ )
    {
      M_FrameInfo::iterator parent_it = frames_.find( frame->parent_ );

      if ( parent_it != frames_.end() )
      {
        FrameInfo* parent = parent_it->second;

        if ( parent->tree_property_ )
        {
          property_manager_->deleteProperty( frame->tree_property_ );
          frame->tree_property_ = property_manager_->createCategory( frame->name_, property_prefix_ + frame->name_ + "Tree", parent->tree_property_, this );
        }
      }
    }

    if ( show_arrows_ )
    {
      tf::Stamped<tf::Pose> parent_pose( btTransform( btQuaternion( 0, 0, 0 ), btVector3( 0, 0, 0 ) ), ros::Time(), frame->parent_ );

      if (tf_->canTransform(fixed_frame_, frame->parent_, ros::Time()))
      {
        try
        {
          tf_->transformPose( fixed_frame_, parent_pose, parent_pose );
        }
        catch(tf::TransformException& e)
        {
          ROS_ERROR( "Error transforming frame '%s' to frame '%s'\n", frame->parent_.c_str(), fixed_frame_.c_str() );
        }
      }

      Ogre::Vector3 parent_position = Ogre::Vector3( parent_pose.getOrigin().x(), parent_pose.getOrigin().y(), parent_pose.getOrigin().z() );
      robotToOgre( parent_position );

      Ogre::Vector3 direction = parent_position - frame->position_;
      float distance = direction.length();
      direction.normalise();

      Ogre::Quaternion orient = Ogre::Vector3::NEGATIVE_UNIT_Z.getRotationTo( direction );

      Ogre::Vector3 old_pos = frame->parent_arrow_->getPosition();

      // The set() operation on the arrow is rather expensive (has to clear/regenerate geometry), so
      // avoid doing it if possible
      bool distance_changed = fabsf(distance - frame->distance_to_parent_) > 0.0001f;
      if ( distance_changed )
      {
        frame->distance_to_parent_ = distance;
        float head_length = ( distance < 0.1 ) ? (0.1*distance) : 0.1;
        float shaft_length = distance - head_length;
        frame->parent_arrow_->set( shaft_length, 0.01, head_length, 0.08 );
        frame->parent_arrow_->setShaftColor( 0.8f, 0.8f, 0.3f, 1.0f );
        frame->parent_arrow_->setHeadColor( 1.0f, 0.1f, 0.6f, 1.0f );
        frame->parent_arrow_->setUserData( Ogre::Any( (void*)this ) );
      }

      if ( distance > 0.001f )
      {
        frame->parent_arrow_->getSceneNode()->setVisible( show_arrows_ );
      }
      else
      {
        frame->parent_arrow_->getSceneNode()->setVisible( false );
      }

      frame->parent_arrow_->setPosition( frame->position_ );
      frame->parent_arrow_->setOrientation( orient );
    }
    else
    {
      frame->parent_arrow_->getSceneNode()->setVisible( false );
    }
  }
  else
  {
    if ( !frame->tree_property_ || old_parent != frame->parent_ )
    {
      property_manager_->deleteProperty( frame->tree_property_ );
      frame->tree_property_ = property_manager_->createCategory( frame->name_, property_prefix_ + frame->name_ + "Tree", tree_category_, this );
    }

    frame->parent_arrow_->getSceneNode()->setVisible( false );
  }

  frame->parent_property_->changed();
}
Example #3
/* ************************************************************************* */
CalibratedCamera CalibratedCamera::retract(const Vector& d) const {
	return CalibratedCamera(pose().retract(d));
}
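
A usage sketch for retract() above; the call applies a tangent-space increment on the pose manifold. The 6-vector layout is an assumption here (GTSAM orders rotation before translation), not something this snippet states:

// gtsam::CalibratedCamera cam(gtsam::Pose3());
// gtsam::Vector6 d; d << 0.0, 0.0, 0.0, 0.1, 0.0, 0.0;  // small translation step
// gtsam::CalibratedCamera moved = cam.retract(d);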
Example #4
 static pose from_trans_rot (const Eigen::Vector2d& trans, const Eigen::Rotation2Dd& rot) {
     return pose (trans, rot);
 }
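
A brief usage sketch for the factory above ('pose' is the project's own type; the Eigen types are standard):

 // Eigen::Vector2d t(1.0, 2.0);
 // Eigen::Rotation2Dd r(M_PI / 4.0);   // 45 degrees
 // pose p = pose::from_trans_rot(t, r);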
	void executeCB(const lasa_action_planners::PLAN2CTRLGoalConstPtr &goal)
	{

		std::string desired_action = goal->action_type;
		ROS_INFO_STREAM( "Desired Action is " << desired_action);

		isOkay = false; isFTOkay = false;
		ros::Rate r(10);
		ROS_INFO("Waiting for EE pose/ft topic...");
		while(ros::ok() && (!isOkay || !isFTOkay)) {
			r.sleep();
		}
		if(!ros::ok()) {
			result_.success = 0;
			ROS_INFO("%s: Failed", action_name_.c_str());
			as_.setAborted(result_);
			return;
		}

		// initialize action progress as null
		feedback_.progress = 0;
		bool success = false;
		///////////////////////////////////////////////
		/////----- EXECUTE REQUESTED ACTION ------/////
		///////////////////////////////////////////////

		if (goal->action_type=="HOME"){
			// TODO: do something meaningful here
			tf::Pose pose(ee_pose);
			success = go_home(pose);
		} else if(goal->action_type == "HOME_POUR") {
			// Home for pouring with lasa robot
			tf::Pose pose(ee_pose);
			pose.setOrigin(tf::Vector3(-0.65, -0.11, 0.474));
			pose.setRotation(tf::Quaternion(-0.006, 0.907, -0.420, -0.114));
			success = go_home(pose);
		}
		if(goal->action_type=="FIND_TABLE"){
			// Go down until table found
			tf::Pose pose(ee_pose);
			success = go_home(pose);
			if(success) {
				success = find_table_for_rolling(goal->height, 0.03, goal->threshold);
			}
		}
		if(goal->action_type=="ROLL_TEST"){

			/**** For now use this instead **/
			tf::Pose p(ee_pose);
			/*********************************/

			success = go_home(p);
			if(success) {
				success = find_table_for_rolling(0.15, 0.03, 5);
				if(success) {
					success = rolling(goal->force, goal->speed, 0.1);
				}
			}
		}
		// Use learned models to perform the requested action
		if(goal->action_type=="LEARNED_MODEL"){
			DoughTaskPhase phase;
			if(goal->action_name == "roll") {
				phase = PHASEROLL;
			} else if(goal->action_name == "reach") {
				phase = PHASEREACH;
			} else if(goal->action_name == "back") {
				phase = PHASEBACK;
			} else if(goal->action_name == "home") {
				phase = PHASEHOME;
			} else {
				ROS_ERROR_STREAM("Unidentified action name "<<goal->action_name.c_str());
				result_.success = 0;
				as_.setAborted(result_);
				return;
			}

			//TODO: update these from action
			double reachingThreshold = 0.02, model_dt = 0.01;
			//CDSController::DynamicsType masterType = CDSController::LINEAR_DYNAMICS;
			CDSController::DynamicsType masterType = CDSController::MODEL_DYNAMICS;
			//CDSController::DynamicsType slaveType = CDSController::LINEAR_DYNAMICS;
			CDSController::DynamicsType slaveType = CDSController::UTHETA;
			tf::Transform trans_obj, trans_att;

			// Small hack: reuse the reaching phase for homing
			if (phase==PHASEHOME){
				phase=PHASEREACH;
				homing = true;
			}

			switch (action_mode) {
			case ACTION_BOXY_FIXED:
				/*if(phase == PHASEREACH) {
					trans_obj.setOrigin(tf::Vector3(0.7, -0.43, 0.64)); // TODO: remember to add 0.15 to tf values as well
					trans_obj.setRotation(tf::Quaternion(-0.01, 0.99, 0.005, -0.009));
				} else if(phase == PHASEROLL) {
					trans_obj.setOrigin(tf::Vector3(0.85, -0.43, ee_pose.getOrigin().z()));
					trans_obj.setRotation(tf::Quaternion(-0.01, 0.99, 0.005, -0.009));
				} else if(phase == PHASEBACK) {
					trans_obj.setOrigin(tf::Vector3(0.73, -0.63, 0.84));
					trans_obj.setRotation(tf::Quaternion(-0.01, 0.99, 0.005, -0.009));
				}
				trans_att.setIdentity();*/
				
				trans_obj.setIdentity();
				trans_obj.setOrigin(tf::Vector3(0.8, -0.43, 0.64)); 

				if(phase == PHASEREACH) {
					trans_att.setOrigin(tf::Vector3(-0.05, 0, 0)); // TODO: remember to add 0.15 to tf values as well
					trans_att.setRotation(tf::Quaternion(-0.01, 0.99, 0.005, -0.009));
				} else if(phase == PHASEROLL) {
					trans_att.setOrigin(tf::Vector3(0.05, 0, 0));
					trans_att.setRotation(tf::Quaternion(-0.01, 0.99, 0.005, -0.009));
				} else if(phase == PHASEBACK) {
					trans_att.setOrigin(tf::Vector3(-0.15, -0.2, 0.15));
					trans_att.setRotation(tf::Quaternion(-0.01, 0.99, 0.005, -0.009));
				}
				break;
			case ACTION_LASA_FIXED:
				if(phase == PHASEREACH) {
					trans_obj.setOrigin(tf::Vector3(-0.55, -0.10, 0.3)); // TODO: remember to add 0.15 to tf values as well (z was 0.15)
					trans_obj.setRotation(tf::Quaternion(0.0, 1.0, 0.0, 0.0));
				} else if(phase == PHASEROLL) {
					trans_obj.setOrigin(tf::Vector3(-0.55, -0.25, ee_pose.getOrigin().z()));
					trans_obj.setRotation(tf::Quaternion(0.0, 1.0, 0.0, 0.0));
				} else if(phase == PHASEBACK) {
					trans_obj.setOrigin(tf::Vector3(-0.55, -0.25, 0.3)); //(z was 0.15)
					trans_obj.setRotation(tf::Quaternion(0.0, 1.0, 0.0, 0.0));
				}
				trans_att.setIdentity();
				break;
			case ACTION_VISION:
				trans_obj.setRotation(tf::Quaternion(goal->object_frame.rotation.x,goal->object_frame.rotation.y,
						goal->object_frame.rotation.z,goal->object_frame.rotation.w));
				trans_obj.setOrigin(tf::Vector3(goal->object_frame.translation.x, goal->object_frame.translation.y,
						goal->object_frame.translation.z));

				trans_att.setRotation(tf::Quaternion(goal->attractor_frame.rotation.x,goal->attractor_frame.rotation.y,
						goal->attractor_frame.rotation.z,goal->attractor_frame.rotation.w));
				trans_att.setOrigin(tf::Vector3(goal->attractor_frame.translation.x, goal->attractor_frame.translation.y,
						goal->attractor_frame.translation.z));


				if (bWaitForForces){ //HACK FOR BOXY
					// Hack! For setting heights for reach and add offset of roller 
					if(phase == PHASEREACH) {
						trans_att.setOrigin(tf::Vector3(goal->attractor_frame.translation.x, goal->attractor_frame.translation.y,goal->attractor_frame.translation.z + 0.1));
					}

					// Hack! safety for rolling
					if(phase == PHASEROLL) {
						trans_obj.setOrigin(tf::Vector3(goal->object_frame.translation.x, goal->object_frame.translation.y,ee_pose.getOrigin().getZ()));
						trans_att.setOrigin(tf::Vector3(goal->attractor_frame.translation.x, goal->attractor_frame.translation.y, 0.0));				
					}
				}

				/*if (bWaitForForces){ //HACK FOR LASA
					// Hack! For setting heights for reach and back
					if(phase == PHASEREACH || phase == PHASEBACK) {
						trans_obj.getOrigin().setZ(0.15);
						trans_att.getOrigin().setZ(0.0);
					}

					// Hack! safety for rolling
					if(phase == PHASEROLL) {
						trans_obj.getOrigin().setZ(ee_pose.getOrigin().getZ());
						trans_att.getOrigin().setZ(0.0);
					}
				}*/

				break;
			default:
				break;
			}


			std::string force_gmm_id = goal->force_gmm_id;
			success = learned_model_execution(phase, masterType, slaveType, reachingThreshold,
					model_dt, trans_obj, trans_att, base_path, force_gmm_id);


		}

		result_.success = success;

		if(success)
		{
			if (!homing){
				ROS_WARN_STREAM("STORE PLOT");
				plot_dyn = 2;
				plot_dyn_msg.data = plot_dyn;
				pub_plot_dyn_.publish(plot_dyn_msg);
				ros::Rate r(1);
				r.sleep();
			}

			//Wait for message of "plot stored"
			/*ros::Rate wait(1000);
			while(ros::ok() && (plot_published!=1)) {
				ros::spinOnce();
				wait.sleep();
			}*/
			ROS_INFO("%s: Succeeded", action_name_.c_str());
			as_.setSucceeded(result_);
		} else {
			ROS_INFO("%s: Failed", action_name_.c_str());
			as_.setAborted(result_);
		}

		// Reset the homing flag only after it has been checked above
		homing = false;
	}
Example #6
ModelTracker::Transform ModelTracker::softPosit(unsigned int numImagePoints,ModelTracker::ImgPoint imagePoints[],const Transform& initialTransform)
	{
	typedef Transform::Vector Vector;
	
	/* Pre-transform the image points by the image transformation: */
	for(unsigned int ipi=0;ipi<numImagePoints;++ipi)
		imagePoints[ipi]=imgTransform.transform(imagePoints[ipi]);
	
	/* Assign initial homogeneous weights to the model points: */
	for(unsigned int mpi=0;mpi<numModelPoints;++mpi)
		mpws[mpi]=1.0;
	
	/* Create the assignment matrix: */
	Math::Matrix m(numImagePoints+1,numModelPoints+1);
	
	/* Initialize the "slack" rows and columns: */
	double gamma=1.0/double(Math::max(numImagePoints,numModelPoints)+1);
	for(unsigned int ipi=0;ipi<numImagePoints;++ipi)
		m(ipi,numModelPoints)=gamma;
	for(unsigned int mpi=0;mpi<numModelPoints;++mpi)
		m(numImagePoints,mpi)=gamma;
	m(numImagePoints,numModelPoints)=gamma;
	
	/* Initialize the pose vectors: */
	Transform::Rotation inverseOrientation=Geometry::invert(initialTransform.getRotation());
	Vector r1=inverseOrientation.getDirection(0);
	Vector r2=inverseOrientation.getDirection(1);
	Vector t=initialTransform.getTranslation();
	double s=-f/t[2];
	
	/* Perform the deterministic annealing loop: */
	for(double beta=0.005;beta<=0.5;beta*=1.025)
		{
		/* Create the initial assignment matrix based on squared distances between projected object points and image points: */
		for(unsigned int ipi=0;ipi<numImagePoints;++ipi)
			for(unsigned int mpi=0;mpi<numModelPoints;++mpi)
				{
				double d2=Math::sqr((r1*modelPoints[mpi]+t[0])*s-mpws[mpi]*imagePoints[ipi][0])
				         +Math::sqr((r2*modelPoints[mpi]+t[1])*s-mpws[mpi]*imagePoints[ipi][1]);
				m(ipi,mpi)=Math::exp(-beta*(d2-maxMatchDist2));
				
				// DEBUGGING
				// std::cout<<' '<<d2;
				}
		// DEBUGGING
		// std::cout<<std::endl;
		
		/* Normalize the assignment matrix using Sinkhorn's method: */
		double rowMaxDelta,colMaxDelta;
		do
			{
			/* Normalize image point rows: */
			rowMaxDelta=0.0;
			for(unsigned int ipi=0;ipi<numImagePoints;++ipi)
				{
				/* Calculate the row sum: */
				double rowSum=0.0;
				for(unsigned int mpi=0;mpi<numModelPoints+1;++mpi)
					rowSum+=m(ipi,mpi);
				
				/* Normalize the row: */
				for(unsigned int mpi=0;mpi<numModelPoints+1;++mpi)
					{
					double oldM=m(ipi,mpi);
					m(ipi,mpi)/=rowSum;
					rowMaxDelta=Math::max(rowMaxDelta,Math::abs(m(ipi,mpi)-oldM));
					}
				}
			
			/* Normalize model point columns: */
			colMaxDelta=0.0;
			for(unsigned int mpi=0;mpi<numModelPoints;++mpi)
				{
				/* Calculate the column sum: */
				double colSum=0.0;
				for(unsigned int ipi=0;ipi<numImagePoints+1;++ipi)
					colSum+=m(ipi,mpi);
				
				/* Normalize the column: */
				for(unsigned int ipi=0;ipi<numImagePoints+1;++ipi)
					{
					double oldM=m(ipi,mpi);
					m(ipi,mpi)/=colSum;
					colMaxDelta=Math::max(colMaxDelta,Math::abs(m(ipi,mpi)-oldM));
					}
				}
			}
		while(rowMaxDelta+colMaxDelta>1.0e-4);
		
		/* Compute the left-hand side of the pose alignment linear system: */
		Math::Matrix lhs(4,4,0.0);
		for(unsigned int mpi=0;mpi<numModelPoints;++mpi)
			{
			const Point& mp=modelPoints[mpi];
			
			/* Calculate the linear equation weight for the model point: */
			double mpWeight=0.0;
			for(unsigned int ipi=0;ipi<numImagePoints;++ipi)
				mpWeight+=m(ipi,mpi);
			
			/* Enter the model point into the pose alignment linear system: */
			for(int i=0;i<3;++i)
				{
				for(int j=0;j<3;++j)
					lhs(i,j)+=mp[i]*mp[j]*mpWeight;
				lhs(i,3)+=mp[i]*mpWeight;
				}
			for(int j=0;j<3;++j)
				lhs(3,j)+=mp[j]*mpWeight;
			lhs(3,3)+=mpWeight;
			}
		
		/* Invert the left-hand side matrix: */
		Math::Matrix lhsInv;
		try
			{
			lhsInv=lhs.inverseFullPivot();
			}
	catch(const Math::Matrix::RankDeficientError&)
			{
			std::cerr<<"Left-hand side matrix is rank deficient"<<std::endl;
			for(int i=0;i<4;++i)
				{
				for(int j=0;j<4;++j)
					std::cerr<<"  "<<lhs(i,j);
				std::cerr<<std::endl;
				}
			
			std::cerr<<"Assignment matrix:"<<std::endl;
			for(unsigned int i=0;i<=numImagePoints;++i)
				{
				for(unsigned int j=0;j<=numModelPoints;++j)
					std::cerr<<"  "<<m(i,j);
				std::cerr<<std::endl;
				}
			
			return Transform::identity;
			}
		
		/* Perform a fixed number of iterations of POSIT: */
		for(unsigned int iteration=0;iteration<2U;++iteration)
			{
			/* Compute the right-hand side of the pose alignment linear system: */
			Math::Matrix rhs(4,2,0.0);
			for(unsigned int mpi=0;mpi<numModelPoints;++mpi)
				{
				const Point& mp=modelPoints[mpi];
				
				/* Enter the model point into the pose alignment linear system: */
				double sumX=0.0;
				double sumY=0.0;
				for(unsigned int ipi=0;ipi<numImagePoints;++ipi)
					{
					sumX+=m(ipi,mpi)*imagePoints[ipi][0];
					sumY+=m(ipi,mpi)*imagePoints[ipi][1];
					}
				sumX*=mpws[mpi];
				sumY*=mpws[mpi];
				
				for(int i=0;i<3;++i)
					{
					rhs(i,0)+=sumX*mp[i];
					rhs(i,1)+=sumY*mp[i];
					}
				rhs(3,0)+=sumX;
				rhs(3,1)+=sumY;
				}
			
			/* Solve the pose alignment system: */
			Math::Matrix pose=lhsInv*rhs;
			for(int i=0;i<3;++i)
				{
				r1[i]=pose(i,0);
				r2[i]=pose(i,1);
				}
			
			/* Orthonormalize the pose vectors: */
			double s1=r1.mag();
			double s2=r2.mag();
			Vector r3=Geometry::normalize(r1^r2);
			Vector mid=r1/s1+r2/s2;
			mid/=mid.mag()*Math::sqrt(2.0);
			Vector mid2=r3^mid;
			r1=mid-mid2;
			r2=mid+mid2;
			s=Math::sqrt(s1*s2);
			t[0]=pose(3,0)/s;
			t[1]=pose(3,1)/s;
			t[2]=-f/s;
			
			/* Update the object points' homogeneous weights: */
			for(unsigned int mpi=0;mpi<numModelPoints;++mpi)
				mpws[mpi]=(r3*modelPoints[mpi])/t[2]+1.0;
			}
		
		// DEBUGGING
		// std::cout<<"Intermediate: "<<Transform(t,Geometry::invert(Transform::Rotation::fromBaseVectors(r1,r2)))<<std::endl;
		}
	
	// DEBUGGING
	std::cerr<<"Final assignment matrix:"<<std::endl;
	for(unsigned int i=0;i<=numImagePoints;++i)
		{
		for(unsigned int j=0;j<=numModelPoints;++j)
			std::cerr<<"  "<<m(i,j);
		std::cerr<<std::endl;
		}
	
	/* Return the result transformation: */
	return Transform(t,Geometry::invert(Transform::Rotation::fromBaseVectors(r1,r2)));
	}
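
The inner do/while above is Sinkhorn normalization: alternately rescale rows and columns of the non-negative assignment matrix until it is approximately doubly stochastic. A self-contained sketch of the same idea (illustrative only; unlike softPosit() it does not treat the slack row/column specially):

#include <vector>
#include <cmath>
#include <algorithm>

// Alternately normalize rows and columns until the largest per-entry change
// falls below a tolerance (Sinkhorn's method); m must be non-empty and non-negative.
void sinkhornNormalize(std::vector<std::vector<double> >& m, double tol = 1.0e-4) {
	double maxDelta;
	do {
		maxDelta = 0.0;
		for (std::size_t i = 0; i < m.size(); ++i) {      // rows
			double sum = 0.0;
			for (std::size_t j = 0; j < m[i].size(); ++j) sum += m[i][j];
			for (std::size_t j = 0; j < m[i].size(); ++j) {
				double old = m[i][j];
				m[i][j] /= sum;
				maxDelta = std::max(maxDelta, std::fabs(m[i][j] - old));
			}
		}
		for (std::size_t j = 0; j < m[0].size(); ++j) {   // columns
			double sum = 0.0;
			for (std::size_t i = 0; i < m.size(); ++i) sum += m[i][j];
			for (std::size_t i = 0; i < m.size(); ++i) {
				double old = m[i][j];
				m[i][j] /= sum;
				maxDelta = std::max(maxDelta, std::fabs(m[i][j] - old));
			}
		}
	} while (maxDelta > tol);
}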
Example #7
	static pose polar (double dist, double dir, double bearing) {
		return pose ({ dist*std::cos(dir), dist*std::sin(dir) }, bearing);
	}
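
Usage sketch ('pose' is the project's own 2-D type; angles in radians):

	// A pose 2 m away along direction 30 degrees, with a bearing of 90 degrees.
	// pose p = pose::polar(2.0, M_PI/6.0, M_PI/2.0);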
Example #8
void ErrorModel::getImageCoordinates(const CalibrationState& state,
		const measurementData& measurement, double& x, double& y) {
	MeasurementPose pose(kinematicChain, measurement.jointState, "");
	pose.predictImageCoordinates(state, x, y);
}
Example #9
	void LoadBundlerMap(const string& structure_file, const string& image_list_file, Map& map) {
		CHECK_PRED1(fs::exists, structure_file);
		CHECK_PRED1(fs::exists, image_list_file);

		// Get the base path to help find images
		fs::path basedir = fs::path(image_list_file).parent_path();

		// Read list of images
		sifstream list_in(image_list_file);
		vector<fs::path> image_paths;
		int nonexistent = 0;
		while (true) {
			string image_file;
			getline(list_in, image_file);
			if (list_in.eof()) break;
			fs::path path = basedir / image_file;
			if (!fs::exists(path)) {
				DLOG << "WARNING: image file not found: " << path;
				nonexistent++;
			}
			image_paths.push_back(path);
		}

		// Load structure
		sifstream in(structure_file);

		// Ignore comment
		string comment;
		getline(in, comment);

		// Load number of cameras and points
		int num_cameras, num_points;
		in >> num_cameras >> num_points;

		CHECK(num_cameras > 0);
		CHECK_EQ(image_paths.size(), num_cameras)
			<< "Number of cameras should equal number of image files in "
			<< image_list_file;

		// Load poses
		bool warned_focal = false;
		bool warned_distortion = false;
		double focal_length;
		double f;
		Mat3 R;
		Vec3 t;
		Vec2 distortion;
		for (int i = 0; i < num_cameras; i++) {
			in >> f >> distortion >> R >> t;
			if (distortion != makeVector(0,0) && !warned_distortion) {
				DLOG << "WARNING: Distorted cameras not supported yet, "
						 << "loading map anyway";
				warned_distortion = true;
			}
			if (i == 0) {
				focal_length = f;
				// Construct a linear camera
				Mat3 cam = Identity;
				cam[0][0] = cam[1][1] = focal_length;
				map.camera.reset(new LinearCamera(cam, *gvDefaultImageSize));
			} else {
				if (f != focal_length && !warned_focal) {
					DLOG << "WARNING: Multiple focal lengths not supported yet, "
						"loading map anyway";
					warned_focal = true;
				}
			}

			// Bundler considers cameras looking down the negative Z axis so
			// invert (this only really matters for visualizations and
			// visibility testing).
			SE3<> pose(SO3<>(-R), -t);
			string image_file = i < image_paths.size() ? image_paths[i].string() : "";
			map.AddFrame(new Frame(i, image_file, pose));
		}


		// Load points
		// TODO: store observations
		Vec3 v, color;
		Vec2 obs_pt;
		int num_obs, obs_camera, obs_key;
		for (int i = 0; i < num_points; i++) {
			in >> v >> color >> num_obs;
			for (int j = 0; j < num_obs; j++) {
				in >> obs_camera >> obs_key >> obs_pt;
			}
			map.points.push_back(v);
		}
	}
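
The inversion comment above refers to Bundler's convention: a world point X projects through x = R*X + t with the camera looking down -Z, so the camera center in world coordinates is C = -R^T * t. A hedged one-liner in the snippet's TooN-style types:

	// Vec3 C = -(R.T() * t);   // world-space camera center under x = R*X + t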
Example #10
int main () {
    FILE *fin  = fopen ("packrec.in", "r");
    FILE *fout = fopen ("packrec.out", "w");
    int rw,rh,era,ret,m[4]={0};
	int i,j,k;
	rect rt,rt2,a,b,c,d;
	
	for(i=0;i<4;i++){
		fscanf (fin, "%d %d", &(frec[i].w),&(frec[i].h));
		frec[i]=vrt(frec[i]);
	}

	for(i=0;i<16;i++){//A
		pose(i);
		rt.w=rt.h=0;
		for(k=0;k<4;k++){
			rt=cnnt(rt,frec[k],1,0);
		}
		sto(rt);
	}
	
	for(i=0;i<16;i++){//B
		pose(i);
		for(j=0;j<24;j++){
			rt.w=rt.h=0;
			rt=cnnt(rt,frec[pm[j][0]],1,0);
			rt=cnnt(rt,frec[pm[j][1]],1,0);
			rt=cnnt(rt,frec[pm[j][2]],1,0);
			rt=cnnt(rt,frec[pm[j][3]],0,0);
			sto(rt);
		}
	}
	
	for(i=0;i<16;i++){//C
		pose(i);
		for(j=0;j<24;j++){
			rt.w=rt.h=0;
			rt=cnnt(rt,frec[pm[j][0]],1,0);
			rt=cnnt(rt,frec[pm[j][1]],1,0);
			rt=cnnt(rt,frec[pm[j][2]],0,0);
			rt=cnnt(rt,frec[pm[j][3]],1,0);
			sto(rt);
		}
	}
	
	for(i=0;i<16;i++){//D,E
		pose(i);
		for(j=0;j<24;j++){
			rt.w=rt.h=0;
			rt=cnnt(rt,frec[pm[j][0]],1,0);
			rt=cnnt(rt,frec[pm[j][1]],0,0);
			rt=cnnt(rt,frec[pm[j][2]],1,0);
			rt=cnnt(rt,frec[pm[j][3]],1,0);
			sto(rt);
		}
	}
	
	for(i=0;i<16;i++){//F
		pose(i);
		for(j=0;j<24;j++){
			rt.w=rt.h=rt2.w=rt2.h=0;
			rt=cnnt(rt,a=frec[pm[j][0]],1,0);
			rt=cnnt(rt,b=frec[pm[j][1]],0,0);
			rt2=cnnt(rt2,c=frec[pm[j][2]],1,0);
			rt2=cnnt(rt2,d=frec[pm[j][3]],0,0);
			rt=cnnt(rt,rt2,1,0);
			if(b.h<d.h&&c.w>d.w&&a.w<b.w)
				rt.w=max(b.w+d.w,a.w+c.w);
			sto(rt);
		}
	}
	
	/*//ver0.1
	rw=0;rh=0;ret=0;
	for(i=0;i<4;i++){
		fscanf (fin, "%d %d", &(frec[i].w),&(frec[i].h));
		frec[i]=vrt(frec[i]);
		if (frec[i].w>frec[i].h){
			swap(&(frec[i].w),&(frec[i].h));
		}
		rw+=frec[i].h;
	}
	rh=rw;
	era=rw*rh;
	*/
	
	//type A
	/*//ver0.2
	rt.w=rt.h=0;
	for(i=0;i<4;i++){
		rt=cnnt(rt,frec[i],1,0);
	}
	sto(vrt(rt));
	*/
	/*//ver0.1
	rh=0;
	for(i=0;i<4;i++){
		if(frec[i].h>rh)rh=frec[i].h;
		rw+=frec[i].w;
	}
	rt.w=rw;rt.h=rh;
	sto(rt);
	*/
	
	//type B
	/*//ver0.2
	rt.w=rt.h=0;
	for(i=0;i<4;i++){
		m[i]=1;
		for(j=0;j<4;j++){
			if(!m[j])rt=cnnt(rt,frec[j],1,0);
		}
		sto(vrt(rt,hor(frec[i])));
		m[i]=0;
	}
	*/
	/*//ver0.1
	for(i=0;i<4;i++)m[i]=0;
	for(i=0;i<4;i++){
		m[i]=1;
		rw=frec[i].h;rh=0;t=0;
		for(j=0;j<4;j++){
			if(!m[j]){
				t+=frec[j].w;
				if (frec[j].h>rh)rh=frec[j].h;
			}
		}
		rw=max(rw,t);
		rh+=frec[i].w;
		rt.w=rw;rt.h=rh;
		sto(rt);
		m[i]=0;
	}
	*/
	
	//type C 
	/*//ver0.2
	for(i=0;i<4;i++){
		m[i]=1;
		for(j=0;j<4;j++){
			if(!m[j]){
				m[j]=1;
				rt.w=rt.h=0;
				for(k=0;k<4;k++){
					if(!frec[k]){
						rt=cnnt(rt,frec[k],1,0);
					}
				}
				rt=cnnt(rt,hor(frec[j]),0,0);
				rt=cnnt(rt,frec[i],1,0);
				sto(vrt(rt));
				m[j]=0;
			}
		}
		m[i]=0;
	}
	*/
	/*//ver0.1
	for(i=0;i<4;i++)m[i]=0;
	for(i=0;i<4;i++){
		m[i]=1;
		for(j=0;j<4;j++){
			if(!m[j]){
				m[j]=1;
				rw=0;rh=0;
				for(k=0;k<4;k++){
					if(!frec[k]){
						if(frec[k].h>rh)rh=frec[k].h;
						rw+=frec[k].w;
					}
				}
				rh+=frec[j].w;
				rw=max(frec[j].h,rw);
				rw+=frec[i].w;
				rh=max(frec[i].h,rh);
				rt.w=rw;rt.h=rh;
				sto(rt);
				m[j]=0;
			}
		}
		m[i]=0;
	}
	*/
	
	//type D
	/*//ver0.2
	for(i=0;i<4;i++){
		m[i]=1;
		for(j=0;j<4;j++){
			if(!m[j]){
				m[j]=1;
				rt.w=rt.h=0;
				for(k=0;k<4;k++){
					if(!frec[k]){
						rt=cnnt(rt,frec[k],0,0);
					}
				}
				rt=cnnt(rt,hor(frec[j]),1,0);
				rt=cnnt(rt,frec[i],1,0);
				sto(vrt(rt));
				m[j]=0;
			}
		}
		m[i]=0;
	}
	*/
	//type E same as D
	//type F
	
	qsort(bxs,sz,sizeof(rect),cmp);
	fprintf (fout, "%d\n", min);
	for(i=0;i<sz;i++){
		fprintf (fout, "%d %d\n", bxs[i].w, bxs[i].h);
	}
    exit (0);
}
Example #11
int main(int argc, char ** argv) {

	ros::init(argc, argv, "path_motion");
	ros::NodeHandle n;

	ros::Publisher motionPub = n.advertise<std_msgs::Float32MultiArray>(
			"/hardware/mobile_base/speeds", 1);
	ros::Subscriber sub_path_motion = n.subscribe("path_motion", 1,
			&callbackMotionPath);
	ros::Subscriber sub_goal_motion = n.subscribe("goal_motion", 1,
			&callbackMotionGoal);
	ros::Rate rate(10);

	RobotPoseSubscriber pose(&n);

	std_msgs::Float32MultiArray speeds;
	speeds.data.push_back(0);
	speeds.data.push_back(0);

	control.SetRobotParams(0.48);

	while (ros::ok()) {

		float xpos = pose.getCurrPos().x;
		float ypos = pose.getCurrPos().y;
		float anglepos = pose.getCurrPos().theta;

		if (newPath) {
			float goalx = path_ptr->poses[curr_index_nav].pose.position.x;
			float goaly = path_ptr->poses[curr_index_nav].pose.position.y;
			float errorX = goalx - xpos;
			float errorY = goaly - ypos;
			float error = sqrt(errorX * errorX + errorY * errorY);
			if (error < 0.1) {
				if (curr_index_nav < path_ptr->poses.size() - 1)
					curr_index_nav++;
				else {
					speeds.data[0] = 0;
					speeds.data[1] = 0;
					motionPub.publish(speeds);
					newPath = false;
				}
			} else {
				control.CalculateSpeeds(xpos, ypos, anglepos, goalx, goaly,
						speeds.data[0], speeds.data[1], false);
				motionPub.publish(speeds);
			}
		}
		if (newGoal) {
			float errorX = goal_ptr->x - xpos;
			float errorY = goal_ptr->y - ypos;
			float error = sqrt(errorX * errorX + errorY * errorY);
			if (error < 0.05) {
				speeds.data[0] = 0;
				speeds.data[1] = 0;
				motionPub.publish(speeds);
				newGoal = false;
			} else {
				control.CalculateSpeeds(xpos, ypos, anglepos, goal_ptr->x,
						goal_ptr->y, speeds.data[0], speeds.data[1], false);
				motionPub.publish(speeds);
			}
		}

		rate.sleep();
		ros::spinOnce();
	}

}
// METHOD THAT RECEIVES SURFLET CLOUDS (USED FOR MIAN DATASET)
std::vector<cluster> poseEstimationSV::poseEstimationCore(pcl::PointCloud<pcl::PointNormal>::Ptr cloud)
{

	int bestPoseAlpha;
	int bestPosePoint;
	int bestPoseVotes;
	
	pcl::PointIndices normals_nan_indices;


	float alpha;
	unsigned int alphaBin,index;
	// Iterators
	std::vector<int>::iterator sr; // scene reference point
	pcl::PointCloud<pcl::PointNormal>::iterator si;	// scene paired point
	std::vector<pointPairSV>::iterator sameFeatureIt; // same key on hash table
	std::vector<boost::shared_ptr<pose> >::iterator bestPosesIt;

	Eigen::Vector4f feature;

	/*std::cout << "\tDownsample dense surflet cloud... " << std::endl;
	cout << "\t\tSurflet cloud size before downsampling: " << cloud->size() << endl;
 	// Create the filtering object
  	pcl::VoxelGrid<pcl::PointNormal> sor;
  	sor.setInputCloud (cloud);
  	sor.setLeafSize (model->distanceStep,model->distanceStep,model->distanceStep);
  	sor.filter (*cloudNormals);
	cout << "\t\tSurflet cloud size after downsampling: " << cloudNormals->size() << endl;
	std::cout<< "\tDone" << std::endl;*/

	cloudNormals=cloud;
	/*boost::shared_ptr<pcl::visualization::PCLVisualizer> viewer2 = objectModel::viewportsVis(cloudNormals);

  	while (!viewer2->wasStopped ())
  	{
   		viewer2->spinOnce (100);
    		boost::this_thread::sleep (boost::posix_time::microseconds (100000));
  	}*/
	/*viewer2 = objectModel::viewportsVis(cloudNormals);

  	while (!viewer2->wasStopped ())
  	{
   		viewer2->spinOnce (100);
    		boost::this_thread::sleep (boost::posix_time::microseconds (100000));
  	}*/
	/*std::cout<< "  Re-estimate normals... " << std::endl;
	pcl::PointCloud<pcl::PointXYZ>::Ptr modelCloudPoint(new pcl::PointCloud<pcl::PointXYZ>);
	for(si=cloudNormals->begin(); si<cloudNormals->end(); ++si)
	{
		modelCloudPoint->push_back(pcl::PointXYZ(si->x,si->y,si->z));
	}
	model->computeNormals(modelCloudPoint,cloudNormals);
	std::cout << "  Done" << std::endl;*/

	//////////////////////////////////////////////////////////////////////////////
	// Filter again to remove spurious normals nans (and it's associated point) //
	//////////////////////////////////////////////////////////////////////////////

	for (unsigned int i = 0; i < cloudNormals->points.size(); ++i) 
	{
		if (isnan(cloudNormals->points[i].normal[0]) || isnan(cloudNormals->points[i].normal[1]) || isnan(cloudNormals->points[i].normal[2]))
		{
	   		normals_nan_indices.indices.push_back(i);
		}
	}


	pcl::ExtractIndices<pcl::PointNormal> nan_extract;
	nan_extract.setInputCloud(cloudNormals);
	nan_extract.setIndices(boost::make_shared<pcl::PointIndices> (normals_nan_indices));
	nan_extract.setNegative(true);
	nan_extract.filter(*cloudWithNormalsDownSampled);
	std::cout<< "\tCloud size after removing NaN normals: " << cloudWithNormalsDownSampled->size() << endl;

	//////////////
	// VOTATION //
	//////////////

	

	/////////////////////////////////////////////
	// Extract reference points from the scene //
	/////////////////////////////////////////////
	//pcl::RandomSample< pcl::PointCloud<pcl::PointNormal> > randomSampler;
	//randomSampler.setInputCloud(cloudWithNormalsDownSampled);

	int numberOfPoints=(int) (cloudWithNormalsDownSampled->size ()*referencePointsPercentage);
	int totalPoints=(int) (cloudWithNormalsDownSampled->size ());
	std::cout << "\tUniform sample a set of " << numberOfPoints << "(" << referencePointsPercentage*100 <<  "%)... ";
	referencePointsIndices->indices.clear();
	extractReferencePointsUniform(referencePointsPercentage,totalPoints);
	std::cout << "Done" << std::endl;
	//std::cout << referencePointsIndices->indices.size() << std::endl;
	//pcl::PointCloud<pcl::PointNormal>::Ptr testeCloud=pcl::PointCloud<pcl::PointNormal>::Ptr (new pcl::PointCloud<pcl::PointNormal>);
	/*for(sr=referencePointsIndices->indices.begin(); sr < referencePointsIndices->indices.end(); ++sr)
	{
		testeCloud->push_back(cloudWithNormalsDownSampled->points[*sr]);
	}*/
	//std::cout <<  totalPoints << std::endl;
	/*boost::shared_ptr<pcl::visualization::PCLVisualizer> viewer2 = objectModel::viewportsVis(cloudWithNormalsDownSampled);

  	while (!viewer2->wasStopped ())
  	{
   		viewer2->spinOnce (100);
    		boost::this_thread::sleep (boost::posix_time::microseconds (100000));
  	}*/
	//////////////
	// Votation //
	//////////////
	// Clear all transformations stored
	std::cout<< "\tVotation... ";
  //std::vector<int>::size_type sz;

  bestPoses.clear();
  //sz = bestPoses.capacity();

  std::vector<int> matches_per_feature;



	std::vector<double> scene_to_global_time;
	std::vector<double> reset_accumulator_time;
	std::vector<double> ppf_time;
	std::vector<double> hash_time;
	std::vector<double> matching_time;
	std::vector<double> get_best_peak_time;

	//Tic();
	for(sr=referencePointsIndices->indices.begin(); sr < referencePointsIndices->indices.begin()+1; ++sr)
	//for(sr=referencePointsIndices->indices.begin(); sr < referencePointsIndices->indices.end(); ++sr)
	{
		Tic();
		Eigen::Vector3f scenePoint=cloudWithNormalsDownSampled->points[*sr].getVector3fMap();
		Eigen::Vector3f sceneNormal=cloudWithNormalsDownSampled->points[*sr].getNormalVector3fMap ();


		Eigen::Vector3f cross=sceneNormal.cross (Eigen::Vector3f::UnitX ()). normalized();
		Eigen::Affine3f rotationSceneToGlobal;

		// Get transformation from scene frame to global frame
		if(sceneNormal.isApprox(Eigen::Vector3f::UnitX (),0.00001))
		{
			rotationSceneToGlobal=Eigen::AngleAxisf(0.0,Eigen::Vector3f::UnitX ());
		}
		else
		{
			cross=sceneNormal.cross (Eigen::Vector3f::UnitX ()). normalized();

			if (isnan(cross[0]))
			{
				// Degenerate cross product; the isApprox() test above should have
				// caught this, so abort rather than vote with a bad frame
				std::cerr << "Degenerate scene normal; cannot build scene-to-global rotation" << std::endl;
				exit(-1);
			}
			else
			{
				rotationSceneToGlobal=Eigen::AngleAxisf(acosf (sceneNormal.dot (Eigen::Vector3f::UnitX ())),cross);
			}
		}

		Eigen::Affine3f transformSceneToGlobal = Eigen::Translation3f ( rotationSceneToGlobal* ((-1)*scenePoint)) * rotationSceneToGlobal;

		Tac();
		scene_to_global_time.push_back(timeEnd);
		//////////////////////
		// Choose best pose //
		//////////////////////

		// Reset pose accumulator
		Tic();
		for(std::vector<std::vector<int> >::iterator accumulatorIt=accumulator.begin();accumulatorIt < accumulator.end(); ++accumulatorIt)
		{
			std::fill(accumulatorIt->begin(),accumulatorIt->end(),0); 
		}
		Tac();
		reset_accumulator_time.push_back(timeEnd);

		for(si=cloudWithNormalsDownSampled->begin(); si < cloudWithNormalsDownSampled->end();++si)
		{
			// if same point, skip point pair
			if( (cloudWithNormalsDownSampled->points[*sr].x==si->x) && (cloudWithNormalsDownSampled->points[*sr].y==si->y) && (cloudWithNormalsDownSampled->points[*sr].z==si->z))
			{
				continue;
			}	
			float squaredDist=(cloudWithNormalsDownSampled->points[*sr].x-si->x)*
					  (cloudWithNormalsDownSampled->points[*sr].x-si->x)+
					  (cloudWithNormalsDownSampled->points[*sr].y-si->y)*
					  (cloudWithNormalsDownSampled->points[*sr].y-si->y)+
					  (cloudWithNormalsDownSampled->points[*sr].z-si->z)*
					  (cloudWithNormalsDownSampled->points[*sr].z-si->z);
			if(squaredDist>model->maxModelDistSquared)
			{
				continue;
			}
			Tic();
			float dist=sqrt(squaredDist);
			// Compute PPF
			pointPairSV PPF=pointPairSV(cloudWithNormalsDownSampled->points[*sr],*si,transformSceneToGlobal);
			Tac();
			ppf_time.push_back(timeEnd);
			Tic();
			// Compute index
			index=PPF.getHash(*si,model->distanceStepInverted);
			Tac();
			hash_time.push_back(timeEnd);
			// If distance between point pairs is bigger than the maximum for this model, skip point pair
			if(index>=pointPairSV::maxHash)
			{
				//std::cout << "DEBUG" << std::endl;
				continue;
			}

			// If there is no similar point pair features in the model, skip point pair and avoid computing the alpha
			if(model->hashTable[index].size()==0)
				continue; 

			int matches=0; //Tests
			Tic();
			// Iterate over similar point pairs
			for(sameFeatureIt=model->hashTable[index].begin(); sameFeatureIt<model->hashTable[index].end(); ++sameFeatureIt)
			{
				// Vote on the reference point and angle (and object)
				alpha=sameFeatureIt->alpha-PPF.alpha; // alpha values between [-360,360]

				// alpha values should be between [-180,180] ANGLE_MAX = 2*PI
				if(alpha<(-PI))
					alpha=ANGLE_MAX+alpha;
				else if(alpha>(PI))
					alpha=alpha-ANGLE_MAX;

				alphaBin=static_cast<unsigned int> ( round((alpha+PI)*pointPair::angleStepInverted) ); // division is slower than multiplication

				if(alphaBin>=pointPair::angleBins)
				{	
					alphaBin=0;
				}

				accumulator[sameFeatureIt->id][alphaBin]+=sameFeatureIt->weight;
				++matches;//Tests
			}
			Tac();
			matching_time.push_back(timeEnd);
			matches_per_feature.push_back(matches);
		}
		//Tac();

		// Choose best pose (highest peak on the accumulator[peak with more votes])
		bestPosePoint=0;
		bestPoseAlpha=0;
		bestPoseVotes=0;
		Tic();
		for(size_t p=0; p < model->modelCloud->size(); ++p)
		{
			for(unsigned int a=0; a < pointPair::angleBins; ++a)
			{
				if(accumulator[p][a]>bestPoseVotes)
				{
					bestPoseVotes=accumulator[p][a];
					bestPosePoint=p;
					bestPoseAlpha=a;
				}
			}
		}
		Tac();
		get_best_peak_time.push_back(timeEnd);

		// A candidate pose was found
		if(bestPoseVotes!=0)
		{
			// Compute and store transformation from model to scene
			bestPoses.push_back(pose( bestPoseVotes,model->modelToScene(bestPosePoint,transformSceneToGlobal,static_cast<float>(bestPoseAlpha)*pointPair::angleStep-PI) ));
		}
		else 
		{
			continue;
		}

		// Choose poses whose votes are a percentage above a given threshold of the best pose
		accumulator[bestPosePoint][bestPoseAlpha]=0; 	// This is more efficient than having an if condition to verify if we are considering the best pose again
		for(size_t p=0; p < model->modelCloud->size(); ++p)
		{
			for(unsigned int a=0; a < pointPair::angleBins; ++a)
			{
				if(accumulator[p][a]>=accumulatorPeakThreshold*bestPoseVotes)
				{
					bestPoses.push_back(pose( accumulator[p][a],model->modelToScene(p,transformSceneToGlobal,static_cast<float>(a)*pointPair::angleStep-PI) ));
				}
			}
		}

   /* if (sz!=bestPoses.capacity()) {
      sz = bestPoses.capacity();
      std::cout << "capacity changed: " << sz << '\n';
      exit(-1);
    }*/

	}
//Tac();
	std::cout << "Done" << std::endl;

	if(bestPoses.size()==0)
	{

		clusters.clear();
		return clusters;
	}


	//////////////////////
	// Compute clusters //
	//////////////////////

	std::cout << "\tCompute clusters... ";
Tic();
	clusters=poseClustering(bestPoses);
	std::cout << "Done" << std::endl;
	clusters[0].voting_time=timeEnd;
	std::cout << timeEnd << " " << clusters[0].voting_time << std::endl;
Tac();
	clusters[0].clustering_time=timeEnd;
	clusters[0].matches_per_feature=matches_per_feature;
  clusters[0].scene_to_global_time=scene_to_global_time;
	clusters[0].reset_accumulator_time=reset_accumulator_time;
	clusters[0].ppf_time=ppf_time;
	clusters[0].hash_time=hash_time;
	clusters[0].matching_time=matching_time;
	clusters[0].get_best_peak_time=get_best_peak_time;

	std::cout << "clusters size:" << clusters.size()<< std::endl;
	return clusters;
}
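
Both voting loops in these methods wrap the alpha difference back into [-PI, PI] before binning. A self-contained sketch of that step, assuming ANGLE_MAX == 2*PI as the comments state:

// Wrap an angle difference from [-2*PI, 2*PI] into [-PI, PI].
static inline float wrapAlpha(float alpha) {
	if (alpha < -PI)
		alpha += ANGLE_MAX;
	else if (alpha > PI)
		alpha -= ANGLE_MAX;
	return alpha;
}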
// METHOD THAT RECEIVES POINT CLOUDS (OPEN MP)
std::vector<cluster> poseEstimationSV::poseEstimationCore_openmp(pcl::PointCloud<pcl::PointXYZ>::ConstPtr cloud)
{
Tic();
	std::vector <std::vector < pose > > bestPosesAux;
	bestPosesAux.resize(omp_get_num_procs());

	//int bestPoseAlpha;
	//int bestPosePoint;
	//int bestPoseVotes;
	
	Eigen::Vector3f scenePoint;
	Eigen::Vector3f sceneNormal;


	pcl::PointIndices normals_nan_indices;
	pcl::ExtractIndices<pcl::PointNormal> nan_extract;

	float alpha;
	unsigned int alphaBin,index;
	// Iterators
	//unsigned int sr; // scene reference point
	pcl::PointCloud<pcl::PointNormal>::iterator si;	// scene paired point
	std::vector<pointPairSV>::iterator sameFeatureIt; // same key on hash table
	std::vector<boost::shared_ptr<pose> >::iterator bestPosesIt;

	Eigen::Vector4f feature;
	Eigen::Vector3f _pointTwoTransformed;
	std::cout<< "\tCloud size: " << cloud->size() << endl;
	//////////////////////////////////////////////
	// Downsample point cloud using a voxelgrid //
	//////////////////////////////////////////////

	pcl::PointCloud<pcl::PointXYZ>::Ptr cloudDownsampled(new pcl::PointCloud<pcl::PointXYZ> ());
  	// Create the filtering object
  	pcl::VoxelGrid<pcl::PointXYZ> sor;
  	sor.setInputCloud (cloud);
  	sor.setLeafSize (model->distanceStep,model->distanceStep,model->distanceStep);
  	sor.filter (*cloudDownsampled);
	std::cout<< "\tCloud size after downsampling: " << cloudDownsampled->size() << endl;

	// Compute point cloud normals (using cloud before downsampling information)
	std::cout<< "\tCompute normals... ";
	cloudNormals=model->computeSceneNormals(cloudDownsampled);
	std::cout<< "Done" << endl;

	/*boost::shared_ptr<pcl_visualization::PCLVisualizer> viewer2 = objectModel::viewportsVis(cloudFilteredNormals);

  	while (!viewer2->wasStopped ())
  	{
   		viewer2->spinOnce (100);
    		boost::this_thread::sleep (boost::posix_time::microseconds (100000));
  	}*/

	/*boost::shared_ptr<pcl_visualization::PCLVisualizer> viewer2 = objectModel::viewportsVis(model->modelCloud);

  	while (!viewer2->wasStopped ())
  	{
   		viewer2->spinOnce (100);
    		boost::this_thread::sleep (boost::posix_time::microseconds (100000));
  	}*/
	//////////////////////////////////////////////////////////////////////////////
	// Filter again to remove spurious normals nans (and it's associated point) //
	//////////////////////////////////////////////////////////////////////////////

	for (unsigned int i = 0; i < cloudNormals->points.size(); ++i) 
	{
		if (isnan(cloudNormals->points[i].normal[0]) || isnan(cloudNormals->points[i].normal[1]) || isnan(cloudNormals->points[i].normal[2]))
		{
	   		normals_nan_indices.indices.push_back(i);
		}
	}

	nan_extract.setInputCloud(cloudNormals);
	nan_extract.setIndices(boost::make_shared<pcl::PointIndices> (normals_nan_indices));
	nan_extract.setNegative(true);
	nan_extract.filter(*cloudWithNormalsDownSampled);
	std::cout<< "\tCloud size after removing NaN normals: " << cloudWithNormalsDownSampled->size() << endl;


	/////////////////////////////////////////////
	// Extract reference points from the scene //
	/////////////////////////////////////////////

	//pcl::RandomSample< pcl::PointCloud<pcl::PointNormal> > randomSampler;
	//randomSampler.setInputCloud(cloudWithNormalsDownSampled);
	// Create the filtering object
	int numberOfPoints=(int) (cloudWithNormalsDownSampled->size ()*referencePointsPercentage);
	int totalPoints=(int) (cloudWithNormalsDownSampled->size ());
	std::cout << "\tUniform sample a set of " << numberOfPoints << "(" << referencePointsPercentage*100 <<  "%)... ";
	referencePointsIndices->indices.clear();
	extractReferencePointsUniform(referencePointsPercentage,totalPoints);
	std::cout << "Done" << std::endl;
	//std::cout << referencePointsIndices->indices.size() << std::endl;

	//////////////
	// Votation //
	//////////////

	std::cout<< "\tVotation... ";

	omp_set_num_threads(omp_get_num_procs());
	//omp_set_num_threads(1);
	//int iteration=0;

        bestPoses.clear();
	#pragma omp parallel for private(alpha,alphaBin,sameFeatureIt,index,feature,si,_pointTwoTransformed,scenePoint,sceneNormal) //reduction(+:iteration)  //nowait
	for(unsigned int sr=0; sr < referencePointsIndices->indices.size(); ++sr)
	{
	
		//++iteration;
		//std::cout << "iteration: " << iteration << " thread:" << omp_get_thread_num() << std::endl;
		//printf("Hello from thread %d, nthreads %d\n", omp_get_thread_num(), omp_get_num_threads());
		scenePoint=cloudWithNormalsDownSampled->points[referencePointsIndices->indices[sr]].getVector3fMap();
		sceneNormal=cloudWithNormalsDownSampled->points[referencePointsIndices->indices[sr]].getNormalVector3fMap();

		// Get transformation from scene frame to global frame
		Eigen::Vector3f cross=sceneNormal.cross (Eigen::Vector3f::UnitX ()). normalized();

		Eigen::Affine3f rotationSceneToGlobal;
		if(isnan(cross[0]))
		{
			rotationSceneToGlobal=Eigen::AngleAxisf(0.0,Eigen::Vector3f::UnitX ());
		}
		else
			rotationSceneToGlobal=Eigen::AngleAxisf(acosf (sceneNormal.dot (Eigen::Vector3f::UnitX ())),cross);

		Eigen::Affine3f transformSceneToGlobal = Eigen::Translation3f ( rotationSceneToGlobal* ((-1)*scenePoint)) * rotationSceneToGlobal;

		//////////////////////
		// Choose best pose //
		//////////////////////

		// Reset pose accumulator
		for(std::vector<std::vector<int> >::iterator accumulatorIt=accumulatorParallelAux[omp_get_thread_num()].begin();accumulatorIt < accumulatorParallelAux[omp_get_thread_num()].end(); ++accumulatorIt)
		{
			std::fill(accumulatorIt->begin(),accumulatorIt->end(),0); 
		}
		

		//std::cout << std::endl;
		for(si=cloudWithNormalsDownSampled->begin(); si < cloudWithNormalsDownSampled->end();++si)
		{
			// if same point, skip point pair
			if( (cloudWithNormalsDownSampled->points[referencePointsIndices->indices[sr]].x==si->x) && (cloudWithNormalsDownSampled->points[referencePointsIndices->indices[sr]].y==si->y) && (cloudWithNormalsDownSampled->points[referencePointsIndices->indices[sr]].z==si->z))
			{
				//std::cout << si->x << " " << si->y << " " << si->z << std::endl;
				continue;
			}	

			// Compute PPF
			pointPairSV PPF=pointPairSV(cloudWithNormalsDownSampled->points[referencePointsIndices->indices[sr]],*si, transformSceneToGlobal);

			// Compute index
			index=PPF.getHash(*si,model->distanceStepInverted);

			// If distance between point pairs is bigger than the maximum for this model, skip point pair
			if(index>=pointPairSV::maxHash)
			{
				//std::cout << "DEBUG" << std::endl;
				continue;
			}

			// If there is no similar point pair features in the model, skip point pair and avoid computing the alpha
			if(model->hashTable[index].size()==0)
				continue; 

			for(sameFeatureIt=model->hashTable[index].begin(); sameFeatureIt<model->hashTable[index].end(); ++sameFeatureIt)
			{
				// Vote on the reference point and angle (and object)
				alpha=sameFeatureIt->alpha-PPF.alpha; // alpha values between [-360,360]

				// alpha values should be between [-180,180] ANGLE_MAX = 2*PI
				if(alpha<(-PI))
					alpha=ANGLE_MAX+alpha;
				else if(alpha>(PI))
					alpha=alpha-ANGLE_MAX;
				//std::cout << "alpha after: " << alpha*RAD_TO_DEG << std::endl;
				//std::cout << "alpha after2: " << (alpha+PI)*RAD_TO_DEG << std::endl;
				alphaBin=static_cast<unsigned int> ( round((alpha+PI)*pointPair::angleStepInverted) ); // division is slower than multiplication
				//std::cout << "angle1: " << alphaBin << std::endl;
           			/*alphaBin = static_cast<unsigned int> (floor (alpha) + floor (PI *poseAngleStepInverted));
				std::cout << "angle2: " << alphaBin << std::endl;*/
				//alphaBin=static_cast<unsigned int> ( floor(alpha*poseAngleStepInverted) + floor(PI*poseAngleStepInverted) );
				if(alphaBin>=pointPair::angleBins)
				{	
					alphaBin=0;
					//ROS_INFO("naoooo");
					//exit(1);
				}

//#pragma omp critical
//{std::cout << index <<" "<<sameFeatureIt->id << " " << alphaBin << " " << omp_get_thread_num() << " " << accumulatorParallelAux[omp_get_thread_num()][sameFeatureIt->id][alphaBin] << std::endl;}

				accumulatorParallelAux[omp_get_thread_num()][sameFeatureIt->id][alphaBin]+=sameFeatureIt->weight;
			}
		}
		//ROS_INFO("DISTANCE:%f DISTANCE SQUARED:%f", model->maxModelDist, model->maxModel

		// Choose best pose (highest peak on the accumulator[peak with more votes])

		int bestPoseAlpha=0;
		int bestPosePoint=0;
		int bestPoseVotes=0;

		for(size_t p=0; p < model->modelCloud->size(); ++p)
		{
			for(unsigned int a=0; a < pointPair::angleBins; ++a)
			{
				if(accumulatorParallelAux[omp_get_thread_num()][p][a]>bestPoseVotes)
				{
					bestPoseVotes=accumulatorParallelAux[omp_get_thread_num()][p][a];
					bestPosePoint=p;
					bestPoseAlpha=a;
				}
			}
		}

		// A candidate pose was found
		if(bestPoseVotes!=0)
		{
			// Compute and store transformation from model to scene
			//boost::shared_ptr<pose> bestPose(new pose( bestPoseVotes,model->modelToScene(model->modelCloud->points[bestPosePoint],transformSceneToGlobal,static_cast<float>(bestPoseAlpha)*pointPair::angleStep-PI) ));

			bestPosesAux[omp_get_thread_num()].push_back(pose( bestPoseVotes,model->modelToScene(bestPosePoint,transformSceneToGlobal,static_cast<float>(bestPoseAlpha)*pointPair::angleStep-PI) ));
			//bestPoses.push_back(bestPose);

			//std::cout << bestPosesAux[omp_get_thread_num()].size() <<" " <<omp_get_thread_num()<< std::endl;
		}
		else 
		{
			continue;
		}

		// Choose poses whose votes are a percentage above a given threshold of the best pose
		accumulatorParallelAux[omp_get_thread_num()][bestPosePoint][bestPoseAlpha]=0; 	// This is more efficient than having an if condition to verify if we are considering the best pose again
		for(size_t p=0; p < model->modelCloud->size(); ++p)
		{
			for(unsigned int a=0; a < pointPair::angleBins; ++a)
			{
				if(accumulatorParallelAux[omp_get_thread_num()][p][a]>=accumulatorPeakThreshold*bestPoseVotes)
				{
					// Compute and store transformation from model to scene for
					// this accumulator peak (p,a), as in the serial version
					bestPosesAux[omp_get_thread_num()].push_back(pose( accumulatorParallelAux[omp_get_thread_num()][p][a],model->modelToScene(p,transformSceneToGlobal,static_cast<float>(a)*pointPair::angleStep-PI) ));
			}
		}
	}

	std::cout << "Done" << std::endl;


	for(int i=0; i<omp_get_num_procs(); ++i)
	{
		for(unsigned int j=0; j<bestPosesAux[i].size(); ++j)
			bestPoses.push_back(bestPosesAux[i][j]);
	}
	std::cout << "\thypothesis number: " << bestPoses.size() << std::endl << std::endl;

	if(bestPoses.size()==0)
	{
		clusters.clear();
		return clusters;
	}

	
	//////////////////////
	// Compute clusters //
	//////////////////////
Tac();
	std::cout << "\tCompute clusters... ";
Tic();
	clusters=poseClustering(bestPoses);
Tac();
	std::cout << "Done" << std::endl;

	return clusters;
}
// CLUSTERING METHOD
std::vector<cluster> poseEstimationSV::poseClustering(std::vector<pose> & bestPoses)
{
	// Sort clusters
	std::sort(bestPoses.begin(), bestPoses.end(), pose::compare);

	std::vector<poseCluster> centroids;
	std::vector<cluster> clusters;    

	float _orientationDistance;	
	float _positionDistance;
	bool found;
	
	int nearestCentroid;

	///////////////////////////////
	// Initialize first centroid //
	///////////////////////////////

	// If there are no poses, return
	if(bestPoses.empty())
		return clusters;

    centroids.reserve(bestPoses.size());

	centroids.push_back(poseCluster(0,bestPoses[0]));

	// For each pose
	for(size_t p=1; p< bestPoses.size(); ++p)	
	{

		found=false;
		for(size_t c=0; c < centroids.size(); ++c)
		{
			// Compute (squared) distance to the cluster center; skip centroids
			// that are farther than half the distance step

			_positionDistance=positionDistance(bestPoses[p],bestPoses[centroids[c].poseIndex]);
			if(_positionDistance>model->halfDistanceStepSquared)
				continue;
			_orientationDistance=orientationDistance(bestPoses[p],bestPoses[centroids[c].poseIndex]);

			// If the cluster is nearer than a threshold add current pose to the cluster
			if(_orientationDistance>=pointPair::angleStepCos) // TODO: check this
			{
				nearestCentroid=c;
				found=true;
				break;
			}		
		}			
			
		if(found)
		{
			//std::cout << " yes:" << "pose:"<< p <<"nearest centroid:" << nearestCentroid << " " << 2*acos(_orientationDistance)*RAD_TO_DEG << " "<< _positionDistance << " " << bestPoses[p].votes << std::endl;
			centroids[nearestCentroid].update(bestPoses[p]);
		}
		else
		{
			//if(2*acos(_orientationDistance)*RAD_TO_DEG< 6.000 && _positionDistance<model->halfDistanceStepSquared)
			//	std::cout << "angle:" << pointPair::angleStep*RAD_TO_DEG  << "angle threshold: " <<2*acos(pointPair::angleStepCos)*RAD_TO_DEG<< " no:" << 2*acos(_orientationDistance)*RAD_TO_DEG << " "<< _positionDistance << std::endl;
			centroids.push_back(poseCluster(p,bestPoses[p]));
		}
		//std::cout << p << std::endl;
	}

	//////////////////////
	// Compute clusters //
	//////////////////////
	//std::sort(centroids.begin(), centroids.end(), poseCluster::compare);

	// Normalize centroids
	float totalModelVotes=static_cast<float>(model->modelCloud->size()*(model->modelCloud->size()-1));
	//std::cout << totalModelVotes << " " << model->totalSurfaceArea << std::endl;
	//std::cout << "Best centroid score before: " << centroids[0].votes << std::endl;


	//std::cout << "Best centroid score after: " << centroids[0].votes << std::endl;
	// end normalize centroids
	//std::cout << std::endl << "Number of poses:" <<bestPoses.size() << std::endl << std::endl;
	//std::cout << std::endl << "Best pose votes:" <<bestPoses[0].votes << std::endl << std::endl;
	if(filterOn)
	{

        std::cout << "Best centroid score: " << centroids[0].votes << std::endl;
        std::cout << "Best centroid score(normalized): " << (float) centroids[0].votes/totalModelVotes << std::endl;
		clusters.reserve(centroids.size());
		for(size_t c=0; c < centroids.size(); ++c)
		//for(size_t c=0; c < 1; ++c)
		{

			clusters.push_back(cluster(pose (centroids[c].votes, transformation(centroids[c].rotation(),centroids[c].translation() ) ),centroids[c].poses ) );
		}
		clusterClusters(clusters);
		std::sort(clusters.begin(), clusters.end(), cluster::compare);
	}
	else
	{
		std::sort(centroids.begin(), centroids.end(), poseCluster::compare);
		//clusters.push_back(cluster(pose (centroids[0].votes, transformation(centroids[0].rotation(),centroids[0].translation() ) ),centroids[0].poses )); // TODO: re-enable
		for(size_t c=0; c < centroids.size(); ++c) // TODO: remove after testing
		//for(size_t c=0; c < 1; ++c)
		{

			clusters.push_back(cluster(pose (centroids[c].votes, transformation(centroids[c].rotation(),centroids[c].translation() ) ),centroids[c].poses ) );
		}
	}

	for(size_t c=0; c < clusters.size(); ++c)
	{
		clusters[c].normalizeVotes(model->totalSurfaceArea/totalModelVotes); // normalize vote weights by the given factor
	}

	return clusters;
}
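The positionDistance and orientationDistance helpers are defined elsewhere in this project. A minimal sketch of what the comparisons above imply, assuming positionDistance is the squared Euclidean distance between translations and orientationDistance is the dot product of unit rotation quaternions (the simplified pose struct below is hypothetical):

#include <Eigen/Geometry>

// Hypothetical, simplified pose carrying only the fields the clustering uses.
struct simplePose {
	Eigen::Quaternionf rotation;
	Eigen::Vector3f translation;
	int votes;
};

// Squared Euclidean distance between translations; compared against
// model->halfDistanceStepSquared above.
float positionDistance(const simplePose& a, const simplePose& b) {
	return (a.translation - b.translation).squaredNorm();
}

// Dot product of unit quaternions: cos(theta/2) of the relative rotation,
// so larger values mean closer orientations, matching the
// ">= pointPair::angleStepCos" test above. (Note q and -q encode the same
// rotation; a robust version would take the absolute value.)
float orientationDistance(const simplePose& a, const simplePose& b) {
	return a.rotation.dot(b.rotation);
}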
int main(int argc, char** argv){
  ros::init(argc, argv, "transformation_tests");

  ros::NodeHandle n;
  ros::Publisher odom_pub = n.advertise<nav_msgs::Odometry>("odom", 50);
  tf::TransformBroadcaster odom_broadcaster;

  double x = 0.0;
  double y = 0.0;
  double th = 0.0;

  double vx = 0.1;
  double vy = -0.1;
  double vth = 0.1;

  ros::Time current_time, last_time;
  current_time = ros::Time::now();
  last_time = ros::Time::now();

  ros::Rate r(1.0);

  visual_slam::Robot_Pose pose(0.0,0.0,0.5,0.0,0.0,0.0);
  double rYaw = pose.convertFromDegreeToRadian(60);
  visual_slam::TFMatrix tf = pose.getTransformationMatrix(0.0,0.0,0.0,0.0,0.0,rYaw);

  while(n.ok()){

    ros::spinOnce();
    current_time = ros::Time::now();

    // Update the pose by composing it with the inverse of the fixed
    // transformation (the 60-degree yaw step computed above)
    visual_slam::TFMatrix Inverse_transformation = pose.getInverseTransformation(tf);

    pose *= Inverse_transformation;

    visual_slam::RobotPose6D robotPose = pose.getRobotPose();
    x = robotPose(0);
    y = robotPose(1);
    th = robotPose(5);
    ROS_INFO("yaw: %f", th);

    //since all odometry is 6DOF we'll need a quaternion created from yaw
    geometry_msgs::Quaternion odom_quat = tf::createQuaternionMsgFromYaw(th);

    //first, we'll publish the transform over tf
    geometry_msgs::TransformStamped odom_trans;
    odom_trans.header.stamp = current_time;
    odom_trans.header.frame_id = "odom";
    odom_trans.child_frame_id = "base_link";

    odom_trans.transform.translation.x = robotPose(0);
    odom_trans.transform.translation.y = robotPose(1);
    odom_trans.transform.translation.z = robotPose(2);
    odom_trans.transform.rotation = odom_quat;

    //send the transform
    odom_broadcaster.sendTransform(odom_trans);

    //next, we'll publish the odometry message over ROS
    nav_msgs::Odometry odom;
    odom.header.stamp = current_time;
    odom.header.frame_id = "odom";

    //set the position
    odom.pose.pose.position.x = robotPose(0);
    odom.pose.pose.position.y = robotPose(1);
    odom.pose.pose.position.z = robotPose(2);
    odom.pose.pose.orientation = odom_quat;

    //set the velocity (left unset; the constant test velocities are commented out)
    odom.child_frame_id = "base_link";
    //odom.twist.twist.linear.x = vx;
    //odom.twist.twist.linear.y = vy;
    //odom.twist.twist.angular.z = vth;

    //publish the message
    odom_pub.publish(odom);

    last_time = current_time;
    r.sleep();
  }
}
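For reference, tf::createQuaternionMsgFromYaw used above reduces to simple trigonometry for a pure-yaw rotation; a minimal equivalent (hypothetical helper name):

#include <cmath>
#include <geometry_msgs/Quaternion.h>

// A pure-yaw rotation has only z and w components:
// q = (0, 0, sin(yaw/2), cos(yaw/2)).
geometry_msgs::Quaternion quaternionFromYaw(double yaw) {
  geometry_msgs::Quaternion q;
  q.x = 0.0;
  q.y = 0.0;
  q.z = std::sin(yaw * 0.5);
  q.w = std::cos(yaw * 0.5);
  return q;
}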
Example #16
hkDemo::Result NormalBlendingDemo::stepDemo()
{

	hkReal runWeight = m_control[HK_RUN_ANIM]->getMasterWeight();

	// Update input
	{
		const hkReal inc = 0.01f;
		const hkgPad* pad = m_env->m_gamePad;
		if ((pad->getButtonState() & HKG_PAD_DPAD_UP) != 0)
			runWeight+=inc;
		if ((pad->getButtonState() & HKG_PAD_DPAD_DOWN) != 0)
			runWeight-=inc;

		runWeight = hkMath::min2(runWeight, 1.0f);
		runWeight = hkMath::max2(runWeight, 0.0f);
	}

	const hkReal walkWeight = 1.0f - runWeight;

	// Set the animation weights
	{
		m_control[HK_RUN_ANIM]->setMasterWeight( runWeight );
		m_control[HK_WALK_ANIM]->setMasterWeight( walkWeight );
	}

	// Sync playback speeds
	{
		const hkReal runPeriod = m_control[HK_RUN_ANIM]->getAnimationBinding()->m_animation->m_duration;
		const hkReal walkPeriod = m_control[HK_WALK_ANIM]->getAnimationBinding()->m_animation->m_duration;

		const hkReal totalW = runWeight+walkWeight+1e-6f;
		const hkReal walkP = walkWeight / totalW;
		const hkReal runP = runWeight / totalW;
		const hkReal runWalkRatio = runPeriod / walkPeriod;
		m_control[HK_RUN_ANIM]->setPlaybackSpeed( (1-runP) * runWalkRatio + runP );
		m_control[HK_WALK_ANIM]->setPlaybackSpeed( (1-walkP) * (1.0f / runWalkRatio) + walkP );
	}

	// Show blend ratio
	{
		char buf[255];
		hkString::sprintf(buf, "run:%0.3f walk:%0.3f", runWeight, walkWeight);

		const int h = m_env->m_window->getHeight();
		m_env->m_textDisplay->outputText( buf, 20, h-40, 0xffffffff, 1);
	}

	// Advance the active animations
	m_skeletonInstance->stepDeltaTime( m_timestep );

	const int boneCount = m_skeleton->m_numBones;

	// Sample the active animations and combine into a single pose
	hkaPose pose(m_skeleton);
	m_skeletonInstance->sampleAndCombineAnimations( pose.accessUnsyncedPoseLocalSpace().begin(), pose.getFloatSlotValues().begin()  );
	AnimationUtils::drawPose( pose, hkQsTransform::getIdentity() );

	// Construct the composite world transform
	hkLocalArray<hkTransform> compositeWorldInverse( boneCount );
	compositeWorldInverse.setSize( boneCount );

	const hkArray<hkQsTransform>& poseInWorld = pose.getSyncedPoseModelSpace();

	// Skin the meshes
	for (int i=0; i < m_numSkinBindings; i++)
	{
		// assumes either a straight map (null map) or a single one (1 palette)
		hkInt16* usedBones = m_skinBindings[i]->m_mappings? m_skinBindings[i]->m_mappings[0].m_mapping : HK_NULL;
		int numUsedBones = usedBones? m_skinBindings[i]->m_mappings[0].m_numMapping : boneCount;

		// Multiply through by the bind pose inverse world inverse matrices
		for (int p=0; p < numUsedBones; p++)
		{
			int boneIndex = usedBones? usedBones[p] : p;
			compositeWorldInverse[p].setMul( poseInWorld[ boneIndex ], m_skinBindings[i]->m_boneFromSkinMeshTransforms[ boneIndex ] );
		}

		AnimationUtils::skinMesh( *m_skinBindings[i]->m_mesh, hkTransform::getIdentity(), compositeWorldInverse.begin(), *m_env->m_sceneConverter );
	}

	return hkDemo::DEMO_OK;
}
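The speed-sync block above interpolates each clip's playback rate between matching the other clip's period and its own natural rate, weighted by its normalized blend weight. A standalone sketch with example numbers (plain C++, no Havok types; the values are illustrative):

#include <cstdio>

void demoSpeedSync() {
	const float runPeriod  = 0.8f;   // hypothetical clip durations, in seconds
	const float walkPeriod = 1.2f;
	const float runWeight  = 0.25f;  // example blend weights
	const float walkWeight = 1.0f - runWeight;

	const float totalW = runWeight + walkWeight + 1e-6f;
	const float runP  = runWeight  / totalW;
	const float walkP = walkWeight / totalW;
	const float runWalkRatio = runPeriod / walkPeriod;

	// With mostly-walk weights the run clip slows toward the walk period and
	// the walk clip stays near its natural rate, keeping the cycles in phase.
	const float runSpeed  = (1 - runP)  * runWalkRatio + runP;
	const float walkSpeed = (1 - walkP) * (1.0f / runWalkRatio) + walkP;
	std::printf("run speed %.3f, walk speed %.3f\n", runSpeed, walkSpeed);
}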
Example #17
hkDemo::Result LookAtDemo::stepDemo()
{
	// Advance the active animations
	m_skeletonInstance->stepDeltaTime( 0.016f );

	int boneCount = m_skeleton->m_numBones;	
	hkaPose pose (m_skeleton);

	// Sample the active animations and combine into a single pose
	m_skeletonInstance->sampleAndCombineAnimations( pose.accessUnsyncedPoseLocalSpace().begin(), pose.getFloatSlotValues().begin()  );

	// m_targetPosition is similar to camera
	hkVector4 newTargetPosition;
	m_env->m_window->getCurrentViewport()->getCamera()->getFrom((float*)&newTargetPosition);

	hkaLookAtIkSolver::Setup setup;
	setup.m_fwdLS.set(0,1,0);
	setup.m_eyePositionLS.set(0.1f,0.1f,0);
	setup.m_limitAxisMS.setRotatedDir(pose.getBoneModelSpace(m_neckIndex).getRotation(), hkVector4(0,1,0));
	setup.m_limitAngle = HK_REAL_PI / 3.0f;

	// Check if the angle-limited variant is in use
	if ( m_variantId == 0 )
	{
		// Solve normally
		hkaLookAtIkSolver::solve (setup, newTargetPosition, 1.0f, pose.accessBoneModelSpace(m_headIndex, hkaPose::PROPAGATE), HK_NULL );
	}
	else
	{
		// Define the range of motion
		hkaLookAtIkSolver::RangeLimits range;
		range.m_limitAngleLeft = 90.0f * HK_REAL_DEG_TO_RAD;
		range.m_limitAngleRight = -90.0f * HK_REAL_DEG_TO_RAD;
		range.m_limitAngleUp = 30.0f * HK_REAL_DEG_TO_RAD;
		range.m_limitAngleDown = -10.0f * HK_REAL_DEG_TO_RAD;

		hkVector4 upLS;
		upLS.set(1,0,0);
		range.m_upAxisMS.setRotatedDir(pose.getBoneModelSpace(m_neckIndex).getRotation(), upLS);

		// Solve using user-defined range of motion
		hkaLookAtIkSolver::solve (setup, newTargetPosition, 1.0f, pose.accessBoneModelSpace(m_headIndex, hkaPose::PROPAGATE), &range );
	}


	// Aim the right eye at the target
	setup.m_fwdLS.set(0,-1,0);
	setup.m_eyePositionLS.set(0.0f,0.0f,0);
	setup.m_limitAxisMS.setRotatedDir(pose.getBoneModelSpace(m_headIndex).getRotation(), hkVector4(0,1,0));
	setup.m_limitAngle = HK_REAL_PI / 10.0f;
	hkaLookAtIkSolver::solve (setup, newTargetPosition, 1.0f, pose.accessBoneModelSpace(m_rEyeIndex, hkaPose::PROPAGATE));

	// Aim the left eye at the target
	setup.m_fwdLS.set(0,-1,0);
	setup.m_eyePositionLS.set(0.0f,0.0f,0);
	setup.m_limitAxisMS.setRotatedDir(pose.getBoneModelSpace(m_headIndex).getRotation(), hkVector4(0,1,0));
	setup.m_limitAngle = HK_REAL_PI / 10.0f;
	hkaLookAtIkSolver::solve (setup, newTargetPosition, 1.0f, pose.accessBoneModelSpace(m_lEyeIndex, hkaPose::PROPAGATE));
	
	//
	// draw the skeleton pose
	//
	AnimationUtils::drawPose( pose, hkQsTransform::getIdentity() );

	// Skin
	{
		// Work with the pose in world
		const hkArray<hkQsTransform>& poseInWorld = pose.getSyncedPoseModelSpace();

		// Construct the composite world transform
		hkLocalArray<hkTransform> compositeWorldInverse( boneCount );
		compositeWorldInverse.setSize( boneCount );

		// Skin the meshes
		for (int i=0; i < m_numSkinBindings; i++)
		{
			// assumes either a straight map (null map) or a single one (1 palette)
			hkInt16* usedBones = m_skinBindings[i]->m_mappings? m_skinBindings[i]->m_mappings[0].m_mapping : HK_NULL;
			int numUsedBones = usedBones? m_skinBindings[i]->m_mappings[0].m_numMapping : boneCount;

			// Multiply through by the bind pose inverse world inverse matrices
			for (int p=0; p < numUsedBones; p++)
			{
				int boneIndex = usedBones? usedBones[p] : p;
				compositeWorldInverse[p].setMul( poseInWorld[ boneIndex ], m_skinBindings[i]->m_boneFromSkinMeshTransforms[ boneIndex ] );
			}

			AnimationUtils::skinMesh( *m_skinBindings[i]->m_mesh, hkTransform::getIdentity(), compositeWorldInverse.begin(), *m_env->m_sceneConverter );
		}

	}

	// Move the attachments
	{
		HK_ALIGN(float f[16], 16);
		for (int a=0; a < m_numAttachments; a++)
		{
			if (m_attachmentObjects[a])
			{
				hkaBoneAttachment* ba = m_attachments[a];  
				pose.getBoneModelSpace( ba->m_boneIndex ).get4x4ColumnMajor(f);
				hkMatrix4 worldFromBone; worldFromBone.set4x4ColumnMajor(f);
				hkMatrix4 worldFromAttachment; worldFromAttachment.setMul(worldFromBone, ba->m_boneFromAttachment);
				m_env->m_sceneConverter->updateAttachment(m_attachmentObjects[a], worldFromAttachment);
			}
		}
	}

	return hkDemo::DEMO_OK;
}
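Both demos build the skinning palette the same way: for each bone used by a mesh, compose the bone's current model-space transform with its inverse-bind (bone-from-skin-mesh) transform. A self-contained sketch of that composition with a toy matrix type (all names hypothetical):

#include <vector>

// Toy row-major 4x4 matrix standing in for hkTransform.
struct Mat4 { float m[16]; };

Mat4 mul(const Mat4& a, const Mat4& b) {
	Mat4 r{};
	for (int i = 0; i < 4; ++i)
		for (int j = 0; j < 4; ++j)
			for (int k = 0; k < 4; ++k)
				r.m[i * 4 + j] += a.m[i * 4 + k] * b.m[k * 4 + j];
	return r;
}

// composite[p] = worldFromBone[bone] * boneFromSkinMesh[bone]: a skin vertex
// is first mapped from mesh space into the bone's bind space, then out to
// the bone's current pose.
std::vector<Mat4> buildSkinningPalette(const std::vector<Mat4>& worldFromBone,
                                       const std::vector<Mat4>& boneFromSkinMesh,
                                       const std::vector<int>& usedBones)
{
	std::vector<Mat4> palette(usedBones.size());
	for (size_t p = 0; p < usedBones.size(); ++p) {
		const int bone = usedBones[p];
		palette[p] = mul(worldFromBone[bone], boneFromSkinMesh[bone]);
	}
	return palette;
}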
Example #18
void MapCloudDisplay::update( float wall_dt, float ros_dt )
{
	rviz::PointCloud::RenderMode mode = (rviz::PointCloud::RenderMode) style_property_->getOptionInt();

	if (needs_retransform_)
	{
		retransform();
		needs_retransform_ = false;
	}

	{
		// Add newly received clouds to the scene, guarded by the clouds mutex
		boost::mutex::scoped_lock lock(new_clouds_mutex_);
		if( !new_cloud_infos_.empty() )
		{
			float size;
			if( mode == rviz::PointCloud::RM_POINTS ) {
				size = point_pixel_size_property_->getFloat();
			} else {
				size = point_world_size_property_->getFloat();
			}

			std::map<int, CloudInfoPtr>::iterator it = new_cloud_infos_.begin();
			std::map<int, CloudInfoPtr>::iterator end = new_cloud_infos_.end();
			for (; it != end; ++it)
			{
				CloudInfoPtr cloud_info = it->second;

				cloud_info->cloud_.reset( new rviz::PointCloud() );
				cloud_info->cloud_->addPoints( &(cloud_info->transformed_points_.front()), cloud_info->transformed_points_.size() );
				cloud_info->cloud_->setRenderMode( mode );
				cloud_info->cloud_->setAlpha( alpha_property_->getFloat() );
				cloud_info->cloud_->setDimensions( size, size, size );
				cloud_info->cloud_->setAutoSize(false);

				cloud_info->manager_ = context_->getSceneManager();

				cloud_info->scene_node_ = scene_node_->createChildSceneNode();

				cloud_info->scene_node_->attachObject( cloud_info->cloud_.get() );
				cloud_info->scene_node_->setVisible(false);

				cloud_infos_.insert(*it);
			}

			new_cloud_infos_.clear();
		}
	}

	{
		// Update which transformer properties are shown, if the lock is free
		boost::recursive_mutex::scoped_try_lock lock( transformers_mutex_ );

		if( lock.owns_lock() )
		{
			if( new_xyz_transformer_ || new_color_transformer_ )
			{
				M_TransformerInfo::iterator it = transformers_.begin();
				M_TransformerInfo::iterator end = transformers_.end();
				for (; it != end; ++it)
				{
					const std::string& name = it->first;
					TransformerInfo& info = it->second;

					setPropertiesHidden( info.xyz_props, name != xyz_transformer_property_->getStdString() );
					setPropertiesHidden( info.color_props, name != color_transformer_property_->getStdString() );
				}
			}
		}

		new_xyz_transformer_ = false;
		new_color_transformer_ = false;
	}

	int totalPoints = 0;
	int totalNodesShown = 0;
	{
		// update poses
		boost::mutex::scoped_lock lock(current_map_mutex_);
		if(!current_map_.empty())
		{
			for (std::map<int, rtabmap::Transform>::iterator it=current_map_.begin(); it != current_map_.end(); ++it)
			{
				std::map<int, CloudInfoPtr>::iterator cloudInfoIt = cloud_infos_.find(it->first);
				if(cloudInfoIt != cloud_infos_.end())
				{
					totalPoints += cloudInfoIt->second->transformed_points_.size();
					cloudInfoIt->second->pose_ = it->second;
					Ogre::Vector3 framePosition;
					Ogre::Quaternion frameOrientation;
					if (context_->getFrameManager()->getTransform(cloudInfoIt->second->message_->header, framePosition, frameOrientation))
					{
						// Multiply frame with pose
						Ogre::Matrix4 frameTransform;
						frameTransform.makeTransform( framePosition, Ogre::Vector3(1,1,1), frameOrientation);
						const rtabmap::Transform & p = cloudInfoIt->second->pose_;
						Ogre::Matrix4 pose(p[0], p[1], p[2], p[3],
										 p[4], p[5], p[6], p[7],
										 p[8], p[9], p[10], p[11],
										 0, 0, 0, 1);
						frameTransform = frameTransform * pose;
						Ogre::Vector3 posePosition = frameTransform.getTrans();
						Ogre::Quaternion poseOrientation = frameTransform.extractQuaternion();
						poseOrientation.normalise();

						cloudInfoIt->second->scene_node_->setPosition(posePosition);
						cloudInfoIt->second->scene_node_->setOrientation(poseOrientation);
						cloudInfoIt->second->scene_node_->setVisible(true);
						++totalNodesShown;
					}
					else
					{
						ROS_ERROR("MapCloudDisplay: Could not update pose of node %d", it->first);
					}

				}
			}
			//hide not used clouds
			for(std::map<int, CloudInfoPtr>::iterator iter = cloud_infos_.begin(); iter!=cloud_infos_.end(); ++iter)
			{
				if(current_map_.find(iter->first) == current_map_.end())
				{
					iter->second->scene_node_->setVisible(false);
				}
			}
		}
	}

	this->setStatusStd(rviz::StatusProperty::Ok, "Points", tr("%1").arg(totalPoints).toStdString());
	this->setStatusStd(rviz::StatusProperty::Ok, "Nodes", tr("%1 shown of %2").arg(totalNodesShown).arg(cloud_infos_.size()).toStdString());
}
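The Ogre::Matrix4 built from p[0..11] above treats rtabmap::Transform as a row-major 3x4 rigid transform, and the scene node pose is the fixed-frame transform composed with the node pose. The same composition expressed with Eigen, as a sketch (assuming Eigen is available; names hypothetical):

#include <Eigen/Geometry>

// Compose a fixed-frame transform with a node pose given as a row-major
// 3x4 matrix, mirroring "frameTransform * pose" above.
Eigen::Isometry3f composeNodePose(const Eigen::Isometry3f& fixedFromFrame,
                                  const float p[12])
{
	Eigen::Isometry3f frameFromNode = Eigen::Isometry3f::Identity();
	frameFromNode.linear() << p[0], p[1], p[2],
	                          p[4], p[5], p[6],
	                          p[8], p[9], p[10];
	frameFromNode.translation() << p[3], p[7], p[11];
	return fixedFromFrame * frameFromNode;  // node pose in the fixed frame
}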
Example #19
	static pose cartesian (double x, double y, double bearing) {
		return pose ({ x, y }, bearing);
	}
void keypoint_map::optimize() {

	g2o::SparseOptimizer optimizer;
	optimizer.setVerbose(true);
	g2o::BlockSolver_6_3::LinearSolverType * linearSolver;

	linearSolver = new g2o::LinearSolverCholmod<
			g2o::BlockSolver_6_3::PoseMatrixType>();

	g2o::BlockSolver_6_3 * solver_ptr = new g2o::BlockSolver_6_3(linearSolver);
	g2o::OptimizationAlgorithmLevenberg* solver =
			new g2o::OptimizationAlgorithmLevenberg(solver_ptr);
	optimizer.setAlgorithm(solver);

	double focal_length = intrinsics[0];
	Eigen::Vector2d principal_point(intrinsics[2], intrinsics[3]);

	g2o::CameraParameters * cam_params = new g2o::CameraParameters(focal_length,
			principal_point, 0.);
	cam_params->setId(0);

	if (!optimizer.addParameter(cam_params)) {
		assert(false);
	}

	std::cerr << camera_positions.size() << " " << keypoints3d.size() << " "
			<< observations.size() << std::endl;

	int vertex_id = 0, point_id = 0;

	for (size_t i = 0; i < camera_positions.size(); i++) {
		Eigen::Affine3f cam_world = camera_positions[i].inverse();
		Eigen::Vector3d trans(cam_world.translation().cast<double>());
		Eigen::Quaterniond q(cam_world.rotation().cast<double>());

		g2o::SE3Quat pose(q, trans);
		g2o::VertexSE3Expmap * v_se3 = new g2o::VertexSE3Expmap();
		v_se3->setId(vertex_id);
		if (i < 1) {
			v_se3->setFixed(true);
		}
		v_se3->setEstimate(pose);
		optimizer.addVertex(v_se3);
		vertex_id++;
	}

	for (size_t i = 0; i < keypoints3d.size(); i++) {
		g2o::VertexSBAPointXYZ * v_p = new g2o::VertexSBAPointXYZ();
		v_p->setId(vertex_id + point_id);
		v_p->setMarginalized(true);
		v_p->setEstimate(keypoints3d[i].getVector3fMap().cast<double>());
		optimizer.addVertex(v_p);
		point_id++;
	}

	for (size_t i = 0; i < observations.size(); i++) {
		g2o::EdgeProjectXYZ2UV * e = new g2o::EdgeProjectXYZ2UV();
		e->setVertex(0,
				dynamic_cast<g2o::OptimizableGraph::Vertex*>(optimizer.vertices().find(
						vertex_id + observations[i].point_id)->second));
		e->setVertex(1,
				dynamic_cast<g2o::OptimizableGraph::Vertex*>(optimizer.vertices().find(
						observations[i].cam_id)->second));
		e->setMeasurement(observations[i].coord.cast<double>());
		e->information() = Eigen::Matrix2d::Identity();

		g2o::RobustKernelHuber* rk = new g2o::RobustKernelHuber;
		e->setRobustKernel(rk);

		e->setParameterId(0, 0);
		optimizer.addEdge(e);

	}

	//optimizer.save("debug.txt");

	optimizer.initializeOptimization();
	optimizer.setVerbose(true);

	std::cout << std::endl;
	std::cout << "Performing full BA:" << std::endl;
	optimizer.optimize(1);
	std::cout << std::endl;

	for (int i = 0; i < vertex_id; i++) {
		g2o::HyperGraph::VertexIDMap::iterator v_it = optimizer.vertices().find(
				i);
		if (v_it == optimizer.vertices().end()) {
			std::cerr << "Vertex " << i << " not in graph!" << std::endl;
			exit(-1);
		}

		g2o::VertexSE3Expmap * v_c =
				dynamic_cast<g2o::VertexSE3Expmap *>(v_it->second);
		if (v_c == 0) {
			std::cerr << "Vertex " << i << " is not a VertexSE3Expmap!"
					<< std::endl;
			exit(-1);
		}

		Eigen::Affine3f pos;
		pos.fromPositionOrientationScale(
				v_c->estimate().translation().cast<float>(),
				v_c->estimate().rotation().cast<float>(),
				Eigen::Vector3f(1, 1, 1));
		camera_positions[i] = pos.inverse();
	}

	for (int i = 0; i < point_id; i++) {
		g2o::HyperGraph::VertexIDMap::iterator v_it = optimizer.vertices().find(
				vertex_id + i);
		if (v_it == optimizer.vertices().end()) {
			std::cerr << "Vertex " << vertex_id + i << " not in graph!"
					<< std::endl;
			exit(-1);
		}

		g2o::VertexSBAPointXYZ * v_p =
				dynamic_cast<g2o::VertexSBAPointXYZ *>(v_it->second);
		if (v_p == 0) {
			std::cerr << "Vertex " << vertex_id + i
					<< " is not a VertexSBAPointXYZ!" << std::endl;
			exit(-1);
		}

		keypoints3d[i].getVector3fMap() = v_p->estimate().cast<float>();
	}

}
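The two pose conversions above (Affine3f to SE3Quat on the way in, and back on the way out) each apply an inversion, because the camera vertices are estimated in the opposite direction from the stored camera_positions convention. Pulled out as helpers, the round trip looks like this sketch (helper names hypothetical):

#include <Eigen/Geometry>
#include <g2o/types/sba/types_six_dof_expmap.h>

// The vertex estimate is the inverse of the stored camera pose convention.
g2o::SE3Quat toVertexEstimate(const Eigen::Affine3f& camera_position) {
	const Eigen::Affine3f cam_world = camera_position.inverse();
	return g2o::SE3Quat(Eigen::Quaterniond(cam_world.rotation().cast<double>()),
	                    cam_world.translation().cast<double>());
}

// Invert again when reading the optimized estimate back.
Eigen::Affine3f fromVertexEstimate(const g2o::SE3Quat& estimate) {
	Eigen::Affine3f pos;
	pos.fromPositionOrientationScale(estimate.translation().cast<float>(),
	                                 Eigen::Quaternionf(estimate.rotation().cast<float>()),
	                                 Eigen::Vector3f(1, 1, 1));
	return pos.inverse();
}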
Example #21
	static pose from_position (const position& position, double bearing) {
		return pose (position.pos, bearing);
	}
Example #22
 Frame & UndirectedTreeLink::pose(LinkMap::const_iterator adjacent_iterator, const double& q) const
 {
     return pose(globalIterator2localIndex(adjacent_iterator),q);
 }
Example #23
Pose2D operator+(const Pose2D& op1, const Pose2D& op2) {
    Pose2D pose(op1.getX() + op2.getX(), op1.getY() + op2.getY(), op1.getTheta() + op2.getTheta());

    return pose;
}
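One caveat with this composition: adding headings directly lets theta grow outside [-pi, pi]. A common companion helper (hypothetical, not part of the original class) wraps it back:

#include <cmath>

// Wrap an angle into [-pi, pi] without branching on multiples of 2*pi.
double normalizeAngle(double theta) {
	return std::atan2(std::sin(theta), std::cos(theta));
}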
Example #24
void Omnidrive::main()
{
  double speed, acc_max, t, radius, drift;
  int tf_frequency, runstop_frequency;
  const int loop_frequency = 250; // 250Hz update frequency

  n_.param("speed", speed, 100.0); // 0.1
  // default acc: brake from max. speed to 0 within 1.5cm
  n_.param("acceleration", acc_max, 1000.0);  //0.5*speed*speed/0.015
  // radius of the robot
  n_.param("radius", radius, 0.6);
  n_.param("tf_frequency", tf_frequency, 50);
  n_.param("runstop_frequency", runstop_frequency, 10);
  n_.param("watchdog_period", t, 0.5);
  ros::Duration watchdog_period(t);
  n_.param("odometry_correction", drift, 1.0);

  double x=0, y=0, a=0;


  // I/O
  ros::Subscriber sub = n_.subscribe("/base_interface/state", 1000, base_state);
  ros::Publisher odom_pub = n_.advertise<nav_msgs::Odometry>("odom", 50);
  tf::TransformBroadcaster transforms;

  acc_max /= loop_frequency;

  // Add a correction factor in case the base odometry is slightly off.
  omnidrive_set_correction(drift);

  ros::Time current_time;

  ros::Rate r(loop_frequency);
  printf("Entering ROS main loop\n");
  while(n_.ok()) {
    current_time = ros::Time::now();
    // Make sure the position vector has been filled in before using it.
    if(got_pos == 1){

      omnidrive_odometry(Vel[0],Vel[1],Vel[2],Vel[3],Pos[0],Pos[1],Pos[2],Pos[3], &x, &y, &a);

      // broadcast the odometry transform over tf
      tf::Quaternion q;
      q.setRPY(0, 0, a);
      tf::Transform pose(q, tf::Point(x, y, 0.0));
      transforms.sendTransform(tf::StampedTransform(pose, current_time, frame_id_, child_frame_id_));

      //since all odometry is 6DOF we'll need a quaternion created from yaw
      geometry_msgs::Quaternion odom_quat = tf::createQuaternionMsgFromYaw(a);

      //next, we'll publish the odometry message over ROS
      nav_msgs::Odometry odom;
      odom.header.stamp = current_time;
      odom.header.frame_id = frame_id_;

      //set the position
      odom.pose.pose.position.x = x;
      odom.pose.pose.position.y = y;
      odom.pose.pose.position.z = 0.0;
      odom.pose.pose.orientation = odom_quat;

      //set the velocity
      // NOTE: kept as in the original, this publishes the position into the
      // twist fields; it is almost certainly meant to be a velocity.
      odom.child_frame_id = child_frame_id_;
      odom.twist.twist.linear.x = x;
      odom.twist.twist.linear.y = y;
      odom.twist.twist.angular.z = 0.0;

      //publish the message
      odom_pub.publish(odom);
    }

    ros::spinOnce();
    r.sleep();
  }
}
Example #25
void OvrSliderComponent::GetVerticalSliderParms( VRMenu & menu, VRMenuId_t const parentId, VRMenuId_t const rootId, 
		Posef const & rootLocalPose, VRMenuId_t const scrubberId, VRMenuId_t const bubbleId, 
		float const sliderFrac, Vector3f const & localSlideDelta,
		float const minValue, float const maxValue, float const sensitivityScale, 
		Array< VRMenuObjectParms const* > & parms )
{
	Vector3f const fwd( 0.0f, 0.0f, -1.0f );
	Vector3f const right( 1.0f, 0.0f, 0.0f );
	Vector3f const up( 0.0f, 1.0f, 0.0f );

	// would be nice to determine these sizes from the images, but we do not load
	// images until much later, meaning we'd need to do the positioning after creation / init.
	float const SLIDER_BUBBLE_WIDTH = 59.0f * VRMenuObject::DEFAULT_TEXEL_SCALE;
	float const SLIDER_BUBBLE_CENTER = 33.0f * VRMenuObject::DEFAULT_TEXEL_SCALE;
	float const SLIDER_TRACK_WIDTH = 9.0f * VRMenuObject::DEFAULT_TEXEL_SCALE;
	float const TRACK_OFFSET = 35.0f * VRMenuObject::DEFAULT_TEXEL_SCALE;
	bool const vertical = true;

	// add parms for the root object that holds all the slider components
	{
		Array< VRMenuComponent* > comps;
		comps.PushBack( new OvrSliderComponent( menu, sliderFrac, localSlideDelta, minValue, maxValue, sensitivityScale, rootId, scrubberId, VRMenuId_t(), bubbleId ) );
		Array< VRMenuSurfaceParms > surfParms;
		char const * text = "slider_root";
		Vector3f scale( 1.0f );
		Posef pose( rootLocalPose );
		Posef textPose( Quatf(), Vector3f( 0.0f ) );
		Vector3f textScale( 1.0f );
		VRMenuFontParms fontParms;
		VRMenuObjectFlags_t objectFlags;
		VRMenuObjectInitFlags_t initFlags( VRMENUOBJECT_INIT_FORCE_POSITION );
		VRMenuObjectParms * itemParms = new VRMenuObjectParms( VRMENU_CONTAINER, comps,
				surfParms, text, pose, scale, textPose, textScale, fontParms, rootId, 
				objectFlags, initFlags );
		itemParms->ParentId = parentId;
		parms.PushBack( itemParms );
	}

	// add parms for the base image that underlays the whole slider
	{
		Array< VRMenuComponent* > comps;
		Array< VRMenuSurfaceParms > surfParms;
		VRMenuSurfaceParms baseParms( "base", GetSliderImage( SLIDER_IMAGE_BASE, vertical ), SURFACE_TEXTURE_DIFFUSE,
				NULL, SURFACE_TEXTURE_MAX, NULL, SURFACE_TEXTURE_MAX );
		surfParms.PushBack( baseParms );
		char const * text = "base";
		Vector3f scale( 1.0f );
		Posef pose( Quatf(), Vector3f() + fwd * 0.1f );
		Posef textPose( Quatf(), Vector3f( 0.0f ) );
		Vector3f textScale( 1.0f );
		VRMenuFontParms fontParms;
		VRMenuObjectFlags_t objectFlags( VRMENUOBJECT_DONT_RENDER_TEXT );
		VRMenuObjectInitFlags_t initFlags( VRMENUOBJECT_INIT_FORCE_POSITION );
		VRMenuObjectParms * itemParms = new VRMenuObjectParms( VRMENU_BUTTON, comps,
				surfParms, text, pose, scale, textPose, textScale, fontParms, VRMenuId_t(), 
				objectFlags, initFlags );
		itemParms->ParentId = rootId;
		parms.PushBack( itemParms );
	}

	// add parms for the track image
	{
		Array< VRMenuComponent* > comps;
		Array< VRMenuSurfaceParms > surfParms;
		VRMenuSurfaceParms baseParms( "track", GetSliderImage( SLIDER_IMAGE_TRACK, vertical ), SURFACE_TEXTURE_DIFFUSE,
				NULL, SURFACE_TEXTURE_MAX, NULL, SURFACE_TEXTURE_MAX );
		surfParms.PushBack( baseParms );
		char const * text = "track";
		Vector3f scale( 1.0f );
		Posef pose( Quatf(), up * TRACK_OFFSET + fwd * 0.09f );
		Posef textPose( Quatf(), Vector3f( 0.0f ) );
		Vector3f textScale( 1.0f );
		VRMenuFontParms fontParms;
		VRMenuObjectFlags_t objectFlags( VRMENUOBJECT_DONT_RENDER_TEXT );
		VRMenuObjectInitFlags_t initFlags( VRMENUOBJECT_INIT_FORCE_POSITION );
		VRMenuObjectParms * itemParms = new VRMenuObjectParms( VRMENU_BUTTON, comps,
				surfParms, text, pose, scale, textPose, textScale, fontParms, VRMenuId_t(), 
				objectFlags, initFlags );
		itemParms->ParentId = rootId;
		parms.PushBack( itemParms );
	}

	// add parms for the filled track image
	{
		Array< VRMenuComponent* > comps;
		Array< VRMenuSurfaceParms > surfParms;
		VRMenuSurfaceParms baseParms( "track_full", GetSliderImage( SLIDER_IMAGE_TRACK_FULL, vertical ), SURFACE_TEXTURE_DIFFUSE,
				NULL, SURFACE_TEXTURE_MAX, NULL, SURFACE_TEXTURE_MAX );
		surfParms.PushBack( baseParms );
		char const * text = "track_full";
		Vector3f scale( 1.0f );
		Posef pose( Quatf(), up * TRACK_OFFSET + fwd * 0.08f );
		Posef textPose( Quatf(), Vector3f( 0.0f ) );
		Vector3f textScale( 1.0f );
		VRMenuFontParms fontParms;
		VRMenuObjectFlags_t objectFlags( VRMENUOBJECT_DONT_RENDER_TEXT );
		VRMenuObjectInitFlags_t initFlags( VRMENUOBJECT_INIT_FORCE_POSITION );
		VRMenuObjectParms * itemParms = new VRMenuObjectParms( VRMENU_BUTTON, comps,
				surfParms, text, pose, scale, textPose, textScale, fontParms, VRMenuId_t(), 
				objectFlags, initFlags );
		itemParms->ParentId = rootId;
		parms.PushBack( itemParms );
	}

	// add parms for the scrubber
	{
		Array< VRMenuComponent* > comps;
		Array< VRMenuSurfaceParms > surfParms;
		VRMenuSurfaceParms baseParms( "scrubber", GetSliderImage( SLIDER_IMAGE_SCRUBBER, vertical ), SURFACE_TEXTURE_DIFFUSE,
				NULL, SURFACE_TEXTURE_MAX, NULL, SURFACE_TEXTURE_MAX );
		surfParms.PushBack( baseParms );
		char const * text = "scrubber";
		Vector3f scale( 1.0f );
		Posef pose( Quatf(), up * TRACK_OFFSET + fwd * 0.07f );
		Posef textPose( Quatf(), Vector3f( 0.0f ) );
		Vector3f textScale( 1.0f );
		VRMenuFontParms fontParms;
		VRMenuObjectFlags_t objectFlags( VRMENUOBJECT_DONT_RENDER_TEXT );
		VRMenuObjectInitFlags_t initFlags( VRMENUOBJECT_INIT_FORCE_POSITION );
		VRMenuObjectParms * itemParms = new VRMenuObjectParms( VRMENU_BUTTON, comps,
				surfParms, text, pose, scale, textPose, textScale, fontParms, scrubberId, 
				objectFlags, initFlags );
		itemParms->ParentId = rootId;
		parms.PushBack( itemParms );
	}

	// add parms for the bubble
	{
		Array< VRMenuComponent* > comps;
		Array< VRMenuSurfaceParms > surfParms;
		VRMenuSurfaceParms baseParms( "bubble", GetSliderImage( SLIDER_IMAGE_BUBBLE, vertical ), SURFACE_TEXTURE_DIFFUSE,
				NULL, SURFACE_TEXTURE_MAX, NULL, SURFACE_TEXTURE_MAX );
		surfParms.PushBack( baseParms );
		char const * text = NULL;
		Vector3f scale( 1.0f );
		Posef pose( Quatf(), right * ( SLIDER_TRACK_WIDTH + SLIDER_BUBBLE_CENTER ) + fwd * 0.06f );
		const float bubbleTextScale = 0.66f;
		const float bubbleTextCenterOffset = SLIDER_BUBBLE_CENTER - ( SLIDER_BUBBLE_WIDTH * 0.5f );
		const Vector3f textPosition = right * bubbleTextCenterOffset;
		Posef textPose( Quatf(), textPosition );
		Vector3f textScale( 1.0f );
		VRMenuFontParms fontParms( true, true, false, false, true, bubbleTextScale );
		VRMenuObjectFlags_t objectFlags;
		VRMenuObjectInitFlags_t initFlags( VRMENUOBJECT_INIT_FORCE_POSITION );
		VRMenuObjectParms * itemParms = new VRMenuObjectParms( VRMENU_BUTTON, comps,
				surfParms, text, pose, scale, textPose, textScale, fontParms, bubbleId, 
				objectFlags, initFlags );
		itemParms->ParentId = scrubberId;
		parms.PushBack( itemParms );
	}
}
Example #26
 ntk::Polygon2d ObjectMatch :: projectedBoundingBox() const
 {
   const ntk::Rect3f& bbox = model().boundingBox();
   const Pose3D& pose3d = pose()->pose3d();
   return project_bounding_box_to_image(pose3d, bbox);
 }
Example #27
 Twist Segment::twist(const double* q, const double& qdot, int dof)const
 {
     return joint.twist(qdot, dof).RefPoint(pose(q).p);
 }
Example #28
void PointCloudDisplay::transformCloud()
{
  if ( message_.header.frame_id.empty() )
  {
    message_.header.frame_id = fixed_frame_;
  }

  tf::Stamped<tf::Pose> pose( btTransform( btQuaternion( 0, 0, 0 ), btVector3( 0, 0, 0 ) ), message_.header.stamp, message_.header.frame_id );

  try
  {
    tf_->transformPose( fixed_frame_, pose, pose );
  }
  catch(tf::TransformException& e)
  {
    ROS_ERROR( "Error transforming point cloud '%s' from frame '%s' to frame '%s'\n", name_.c_str(), message_.header.frame_id.c_str(), fixed_frame_.c_str() );
  }

  Ogre::Vector3 position( pose.getOrigin().x(), pose.getOrigin().y(), pose.getOrigin().z() );
  robotToOgre( position );

  btScalar yaw, pitch, roll;
  pose.getBasis().getEulerZYX( yaw, pitch, roll );

  Ogre::Matrix3 orientation( ogreMatrixFromRobotEulers( yaw, pitch, roll ) );

  // First find the min/max intensity values
  float min_intensity = 999999.0f;
  float max_intensity = -999999.0f;

  typedef std::vector<std_msgs::ChannelFloat32> V_Chan;
  typedef std::vector<bool> V_bool;

  V_bool valid_channels(message_.chan.size());
  uint32_t point_count = message_.get_pts_size();
  V_Chan::iterator chan_it = message_.chan.begin();
  V_Chan::iterator chan_end = message_.chan.end();
  uint32_t index = 0;
  for ( ; chan_it != chan_end; ++chan_it, ++index )
  {
    std_msgs::ChannelFloat32& chan = *chan_it;
    uint32_t val_count = chan.vals.size();
    bool channel_size_correct = val_count == point_count;
    ROS_ERROR_COND(!channel_size_correct, "Point cloud '%s' on topic '%s' has channel %d with fewer values than points (%d values, %d points)", name_.c_str(), topic_.c_str(), index, val_count, point_count);

    valid_channels[index] = channel_size_correct;

    if ( channel_size_correct && ( chan.name.empty() || chan.name == "intensity" || chan.name == "intensities" ) )
    {
      for(uint32_t i = 0; i < point_count; i++)
      {
        float& intensity = chan.vals[i];
        // arbitrarily cap to 4096 for now
        intensity = std::min( intensity, 4096.0f );
        min_intensity = std::min( min_intensity, intensity );
        max_intensity = std::max( max_intensity, intensity );
      }
    }
  }

  float diff_intensity = max_intensity - min_intensity;

  typedef std::vector< ogre_tools::PointCloud::Point > V_Point;
  V_Point points;
  points.resize( point_count );
  for(uint32_t i = 0; i < point_count; i++)
  {
    Ogre::Vector3 color( color_.r_, color_.g_, color_.b_ );
    ogre_tools::PointCloud::Point& current_point = points[ i ];

    current_point.x_ = message_.pts[i].x;
    current_point.y_ = message_.pts[i].y;
    current_point.z_ = message_.pts[i].z;
    current_point.r_ = color.x;
    current_point.g_ = color.y;
    current_point.b_ = color.z;
  }

  chan_it = message_.chan.begin();
  index = 0;
  for ( ; chan_it != chan_end; ++chan_it, ++index )
  {
    if ( !valid_channels[index] )
    {
      continue;
    }

    std_msgs::ChannelFloat32& chan = *chan_it;
    enum ChannelType
    {
      CT_INTENSITY,
      CT_RGB,
      CT_R,
      CT_G,
      CT_B,

      CT_COUNT
    };

    ChannelType type = CT_INTENSITY;
    if ( chan.name == "rgb" )
    {
      type = CT_RGB;
    }
    else if ( chan.name == "r" )
    {
      type = CT_R;
    }
    else if ( chan.name == "g" )
    {
      type = CT_G;
    }
    else if ( chan.name == "b" )
    {
      type = CT_B;
    }

    typedef void (*TransformFunc)(float, ogre_tools::PointCloud::Point&, float, float, float);
    TransformFunc funcs[CT_COUNT] =
    {
      transformIntensity,
      transformRGB,
      transformR,
      transformG,
      transformB
    };

    for(uint32_t i = 0; i < point_count; i++)
    {
      ogre_tools::PointCloud::Point& current_point = points[ i ];
      funcs[type]( chan.vals[i], current_point, min_intensity, max_intensity, diff_intensity );
    }
  }

  {
    RenderAutoLock renderLock( this );

    scene_node_->setPosition( position );
    scene_node_->setOrientation( orientation );

    cloud_->clear();

    if ( !points.empty() )
    {
      cloud_->addPoints( &points.front(), points.size() );
    }
  }

  causeRender();

}
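The channel transform functions (transformIntensity and friends) are defined elsewhere in the file; a plausible sketch of transformIntensity, matching the TransformFunc typedef above and assuming a linear mapping of intensity onto the point's base color:

// Hypothetical sketch; the real implementation may differ.
static void transformIntensity(float val, ogre_tools::PointCloud::Point& point,
                               float min_intensity, float /*max_intensity*/,
                               float diff_intensity)
{
	const float normalized =
	    (diff_intensity > 0.0f) ? (val - min_intensity) / diff_intensity : 1.0f;
	point.r_ *= normalized;
	point.g_ *= normalized;
	point.b_ *= normalized;
}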
Example #29
/* ************************************************************************* */
Vector CalibratedCamera::localCoordinates(const CalibratedCamera& T2) const {
	return pose().localCoordinates(T2.pose());
}
Example #30
void OvrScrollBarComponent::GetScrollBarParms( VRMenu & menu, float scrollBarLength, const VRMenuId_t parentId, const VRMenuId_t rootId, const VRMenuId_t xformId,
	const VRMenuId_t baseId, const VRMenuId_t thumbId, const Posef & rootLocalPose, const Posef & xformPose, const int startElementIndex, 
	const int numElements, const bool verticalBar, const Vector4f & thumbBorder, Array< const VRMenuObjectParms* > & parms )
{
	// Build up the scrollbar parms
	OvrScrollBarComponent * scrollComponent = new OvrScrollBarComponent( rootId, baseId, thumbId, startElementIndex, numElements );
	scrollComponent->SetIsVertical( verticalBar );

	// parms for the root object that holds all the scrollbar components
	{
		Array< VRMenuComponent* > comps;
		comps.PushBack( scrollComponent );
		Array< VRMenuSurfaceParms > surfParms;
		char const * text = "scrollBarRoot";
		Vector3f scale( 1.0f );
		Posef pose( rootLocalPose );
		Posef textPose( Quatf(), Vector3f( 0.0f ) );
		Vector3f textScale( 1.0f );
		VRMenuFontParms fontParms;
		VRMenuObjectFlags_t objectFlags( VRMENUOBJECT_DONT_HIT_ALL );
		objectFlags |= VRMENUOBJECT_DONT_RENDER_TEXT;
		VRMenuObjectInitFlags_t initFlags( VRMENUOBJECT_INIT_FORCE_POSITION  );
		VRMenuObjectParms * itemParms = new VRMenuObjectParms( VRMENU_CONTAINER, comps,
			surfParms, text, pose, scale, textPose, textScale, fontParms, rootId,
			objectFlags, initFlags );
		itemParms->ParentId = parentId;
		parms.PushBack( itemParms );
	}

	// add parms for the object that serves as a transform 
	{
		Array< VRMenuComponent* > comps;
		Array< VRMenuSurfaceParms > surfParms;
		char const * text = "scrollBarTransform";
		Vector3f scale( 1.0f );
		Posef pose( xformPose );
		Posef textPose( Quatf(), Vector3f( 0.0f ) );
		Vector3f textScale( 1.0f );
		VRMenuFontParms fontParms;
		VRMenuObjectFlags_t objectFlags( VRMENUOBJECT_DONT_HIT_ALL );
		objectFlags |= VRMENUOBJECT_DONT_RENDER_TEXT;
		VRMenuObjectInitFlags_t initFlags( VRMENUOBJECT_INIT_FORCE_POSITION );
		VRMenuObjectParms * itemParms = new VRMenuObjectParms( VRMENU_CONTAINER, comps,
			surfParms, text, pose, scale, textPose, textScale, fontParms, xformId,
			objectFlags, initFlags );
		itemParms->ParentId = rootId;
		parms.PushBack( itemParms );
	}

	// add parms for the base image that underlays the whole scrollbar
	{
		int sbWidth = 0, sbHeight = 0;
		GLuint sbTexture = OVR::LoadTextureFromApplicationPackage( GetImage( SCROLLBAR_IMAGE_BASE, verticalBar ).ToCStr(), TextureFlags_t( TEXTUREFLAG_NO_DEFAULT ), sbWidth, sbHeight );
		if ( verticalBar )
		{
			scrollComponent->SetScrollBarBaseWidth( static_cast<float>( sbWidth ) );
			scrollComponent->SetScrollBarBaseHeight( scrollBarLength );
		}
		else
		{
			scrollComponent->SetScrollBarBaseWidth( scrollBarLength );
			scrollComponent->SetScrollBarBaseHeight( static_cast<float>( sbHeight ) );
		}

		Array< VRMenuComponent* > comps;
		Array< VRMenuSurfaceParms > surfParms;
		char const * text = "scrollBase";
		VRMenuSurfaceParms baseParms( text,
				sbTexture, sbWidth, sbHeight, SURFACE_TEXTURE_DIFFUSE,
				0, 0, 0, SURFACE_TEXTURE_MAX,
				0, 0, 0, SURFACE_TEXTURE_MAX );
		surfParms.PushBack( baseParms );
		Vector3f scale( 1.0f );
		Posef pose( Quatf(), Vector3f( 0.0f ) );
		Posef textPose( Quatf(), Vector3f( 0.0f ) );
		Vector3f textScale( 1.0f );
		VRMenuFontParms fontParms;
		VRMenuObjectFlags_t objectFlags( VRMENUOBJECT_DONT_HIT_ALL );
		objectFlags |= VRMENUOBJECT_DONT_RENDER_TEXT;
		VRMenuObjectInitFlags_t initFlags( VRMENUOBJECT_INIT_FORCE_POSITION  );
		VRMenuObjectParms * itemParms = new VRMenuObjectParms( VRMENU_BUTTON, comps,
			surfParms, text, pose, scale, textPose, textScale, fontParms, baseId,
			objectFlags, initFlags );
		itemParms->ParentId = xformId;
		parms.PushBack( itemParms );
	}

	// add parms for the thumb image of the scrollbar
	{
		int stWidth = 0, stHeight = 0;
		GLuint stTexture = OVR::LoadTextureFromApplicationPackage( GetImage( SCROLLBAR_IMAGE_THUMB, verticalBar ).ToCStr(), TextureFlags_t( TEXTUREFLAG_NO_DEFAULT ), stWidth, stHeight );
		scrollComponent->SetScrollBarThumbWidth(  (float)( stWidth ) );
		scrollComponent->SetScrollBarThumbHeight( (float)( stHeight ) );

		Array< VRMenuComponent* > comps;
		Array< VRMenuSurfaceParms > surfParms;
		char const * text = "scrollThumb";
		VRMenuSurfaceParms thumbParms( text,
				stTexture, stWidth, stHeight, SURFACE_TEXTURE_DIFFUSE,
				0, 0, 0, SURFACE_TEXTURE_MAX,
				0, 0, 0, SURFACE_TEXTURE_MAX );
		//thumbParms.Border = thumbBorder;
		surfParms.PushBack( thumbParms );
		Vector3f scale( 1.0f );
		Posef pose( Quatf(), -FWD * THUMB_FROM_BASE_OFFSET );
		// Since we use left aligned anchors on the base and thumb, we offset the root once to center the scrollbar
		Posef textPose( Quatf(), Vector3f( 0.0f ) );
		Vector3f textScale( 1.0f );
		VRMenuFontParms fontParms;
		VRMenuObjectFlags_t objectFlags( VRMENUOBJECT_DONT_HIT_ALL );
		objectFlags |= VRMENUOBJECT_DONT_RENDER_TEXT;
		objectFlags |= VRMENUOBJECT_FLAG_POLYGON_OFFSET;
		VRMenuObjectInitFlags_t initFlags( VRMENUOBJECT_INIT_FORCE_POSITION  );
		VRMenuObjectParms * itemParms = new VRMenuObjectParms( VRMENU_BUTTON, comps,
			surfParms, text, pose, scale, textPose, textScale, fontParms, thumbId,
			objectFlags, initFlags );
		itemParms->ParentId = xformId;
		parms.PushBack( itemParms );
	}
}