Example #1
	void initialize()
		{
			// write "right" to ltl.planner
			std_msgs::String msg;
			std::stringstream ss;
			ss << "right" << std::endl; // note: std::endl appends a trailing '\n' to the published string
			std::cout << "**AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGoToWaypoint -%- Executing Main Task, elapsed_time: " << std::endl;

			msg.data = ss.str();
			// ROS_INFO("%s", msg.data.c_str());
			chatter_pub.publish(msg);
			std::cout << "**msg:"
			          << msg << std::endl;
			counter++;

			sleep(1.0); // sleep() takes whole seconds, so this pauses for about 1 s
			//Set the stiffness so the robot can move
			AL::ALValue stiffness_name("Body");
			AL::ALValue stiffness(1.0f);
			AL::ALValue stiffness_time(1.0f);
			motion_proxy_ptr->stiffnessInterpolation(stiffness_name,
			                                         stiffness,
			                                         stiffness_time);
			if( message_.type == "goto")
				{
				 motion_proxy_ptr->moveInit();
				}
			set_feedback(RUNNING);
		}
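The initialize() above publishes the string "right" on chatter_pub, which is declared elsewhere in the class. For orientation, a minimal, self-contained sketch of how such a std_msgs::String publisher is typically advertised and used in ROS; the node name and the topic name "ltl_planner" are assumptions based on the "write 'right' to ltl.planner" comment, not taken from this code:

#include <ros/ros.h>
#include <std_msgs/String.h>

int main(int argc, char** argv)
{
	ros::init(argc, argv, "ltl_command_publisher"); // node name is an assumption
	ros::NodeHandle nh;
	// Topic name is a guess based on the "ltl.planner" comment above
	ros::Publisher chatter_pub = nh.advertise<std_msgs::String>("ltl_planner", 10);

	std_msgs::String msg;
	msg.data = "right";

	ros::Rate rate(1); // publish once per second
	while (ros::ok())
	{
		chatter_pub.publish(msg);
		rate.sleep();
	}
	return 0;
}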
void NaoMarkServiceDetection::callback(const std::string &key, const AL::ALValue &value, const AL::ALValue &msg) 
{
	AL::ALValue marks = fMemoryProxy.getData("LandmarkDetected");

	if(marks.getSize() > 0 && !_isMarkFound)
	{
		// LandmarkDetected layout: [TimeStamp, MarkInfo[], ...], where each MarkInfo is
		// [ShapeInfo, ExtraInfo], ShapeInfo = [shapeId, alpha, beta, sizeX, sizeY] and ExtraInfo = [MarkID]
		int TimeStampField = marks[0][1]; // detection timestamp (currently unused)
		if((int)marks[1][0][1][0] == _markToFind) // ID of the first detected mark
		{
			if((float)marks[1][0][0][3] < 0.2f) // mark still appears small (far away): keep approaching
			{
				if(_isAllowedToMove)
					motionProxy->moveToward(0.5f, 0, marks[1][0][0][1]); // walk forward, steering by the mark's angle alpha
				_naoMarkDetected = true;
			}
			else
			{
				// Mark appears large enough (close): stop and flag it as found
				motionProxy->moveToward(0,0,0);
				_isMarkFound = true;
			}
		}
		else
		{
			// Wrong mark: stop moving
			motionProxy->moveToward(0,0,0);
		}
	}
	}
}
	void initialize()
	{
		init_ = true;

		// Enable stiffness
		AL::ALValue stiffness_name("Body");
		AL::ALValue stiffness(1.0f);
		AL::ALValue stiffness_time(1.0f);
		motion_proxy_ptr->stiffnessInterpolation(stiffness_name,stiffness,stiffness_time);

		// Init moving
		motion_proxy_ptr->moveInit();

		// Robot not detected
		robotDetected = false;
	}
	void finalize()
	{
		// Stop moving
		motion_proxy_ptr->stopMove();

		init_ = false;
		deactivate();
	}
	void finalize()
	{
		// Stop rotating
		motion_proxy_ptr->stopMove();

		// Delete Filter
		delete ic;

		init_ = false;
		deactivate();
	}
	void initialize()
	{
		init_ = true;

		// Enable stiffness
		AL::ALValue stiffness_name("Body");
		AL::ALValue stiffness(1.0f);
		AL::ALValue stiffness_time(1.0f);
		motion_proxy_ptr->stiffnessInterpolation(stiffness_name,stiffness,stiffness_time);

		// Stand
		//robotPosture->goToPosture("Stand",0.5f);

		// Init moving
		motion_proxy_ptr->moveInit();

		// Initial odometry: getRobotPosition returns [x, y, theta], so z_0 stores the initial heading
		std::vector<float> initial_pose = motion_proxy_ptr->getRobotPosition(true);
		x_0 = initial_pose.at(0);
		y_0 = initial_pose.at(1);
		z_0 = initial_pose.at(2);
	}
void NaoMarkServiceDetection::backGroundService()
{ 
	_isThreadRuning = true;
	while(_isThreadRuning)
	{
		_naoMarkDetected = false;
		Sleep((DWORD)1000); // Win32 Sleep: pause 1000 ms between detection checks
		if(!_naoMarkDetected && _markToFind != -1 && !_isMarkFound)
		{
			motionProxy->moveToward(0,0,0);
			FindMarkArround();
		}
	}
}
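The background service above pauses with the Win32 Sleep((DWORD)1000) call, so it only builds on Windows. As a hedged alternative, a portable C++11 sketch of the same one-second pause (not part of the original code):

#include <chrono>
#include <thread>

int main()
{
	// Portable equivalent of Sleep((DWORD)1000): block the calling thread for one second
	std::this_thread::sleep_for(std::chrono::seconds(1));
	return 0;
}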
	int executeCB(ros::Duration dt)
	{
		std::cout << "**Walk -%- Executing Main Task, elapsed_time: "
		          << dt.toSec() << std::endl;
		std::cout << "**Walk -%- execute_time: "
		          << execute_time_.toSec() << std::endl;
		execute_time_ += dt;

		if(!init_)
		{
			set_feedback(RUNNING);
			initialize();
		}

		// Current odometry (getRobotPosition returns [x, y, theta])
		std::vector<float> pose = motion_proxy_ptr->getRobotPosition(true);
		double x = pose.at(0);
		double y = pose.at(1);
		double z = pose.at(2); // heading

		// Walk forward with a proportional heading correction back toward the initial heading z_0,
		// clamped to [-1, 1]
		double angular = 0.1*modulo2Pi(z_0-z);
		//ROS_INFO("theta = %f",angular);
		if(angular > 1) {angular = 1;}
		if(angular < -1) {angular = -1;}
		geometry_msgs::Twist cmd;
		cmd.linear.x = 0.3; //0.5
		cmd.angular.z = angular;
		cmd_pub.publish(cmd);

		// Succeed once the robot has traveled farther than dist from its starting position
		if(sqrt((x-x_0)*(x-x_0) + (y-y_0)*(y-y_0)) > dist)
		{
			set_feedback(SUCCESS);
			finalize();
			return 1;
		}

		return 0;
	}
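The executeCB above relies on a modulo2Pi helper that is not shown in this excerpt. A minimal sketch of what such an angle-wrapping function usually looks like, assuming it is meant to map the heading error into [-pi, pi] so the proportional correction turns the short way around:

#include <cmath>

double modulo2Pi(double angle)
{
	// Wrap an angle difference into [-pi, pi]
	while (angle >  M_PI) angle -= 2.0 * M_PI;
	while (angle < -M_PI) angle += 2.0 * M_PI;
	return angle;
}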
Example #9
	int executeCB(ros::Duration dt)
		{
			std::cout << "**GoToWaypoint -%- Executing Main Task, elapsed_time: "
			          << dt.toSec() << std::endl;
			std::cout << "**GoToWaypoint -%- execute_time: "
			          << execute_time_.toSec() << std::endl;
			execute_time_ += dt;

			std::cout << "**Counter value:" << counter << std::endl;
			if (counter > 1)
				std::cout << "********************************************************************************" << std::endl;


			if (!init_)
			{
				initialize();
				init_ = true;
			}
			if ( (ros::Time::now() - time_at_pos_).toSec() < 0.2 )
			{
				if( message_.type == "goto")
				{
						// Goal position of ball relative to ROBOT_FRAME
						float goal_x = 0.00;
						float goal_y = 0.00;
						float error_x = message_.x - goal_x;
						float error_y = message_.y - goal_y;
						if (fabs(error_x) < 0.12 && fabs(error_y) < 0.12)
						{
							std::cout << "Closeness count " << closeness_count << std::endl;
							closeness_count++;
							//If the NAO has been close for enough iterations, we consider the goal reached
							//(with the current threshold of > 0, a single close reading suffices)
							if (closeness_count > 0)
							{
								motion_proxy_ptr->stopMove();
								set_feedback(SUCCESS);
								// std::cout << "sleeeping for 2 second before destroying thread" << std::endl;
								// sleep(2.0);
								finalize();
								return 1;
							}
						}
						else
						{
							closeness_count = 0;
						}
						//Limit the "error" in order to limit the walk speed
						error_x = error_x >  0.6 ?  0.6 : error_x;
						error_x = error_x < -0.6 ? -0.6 : error_x;
						error_y = error_y >  0.6 ?  0.6 : error_y;
						error_y = error_y < -0.6 ? -0.6 : error_y;
						// float speed_x = error_x * 1.0/(2+5*closeness_count);
						// float speed_y = error_y * 1.0/(2+5*closeness_count);
						// float frequency = 0.1/(5*closeness_count+(1.0/(fabs(error_x)+fabs(error_y)))); //Frequency of foot steps
						// motion_proxy_ptr->setWalkTargetVelocity(speed_x, speed_y, 0.0, frequency);
						// ALMotionProxy::setWalkTargetVelocity(const float& x, const float& y, const float& theta, const float& frequency)
						AL::ALValue walk_config; // only used by the commented-out gait configuration below
						//walk_config.arrayPush(AL::ALValue::array("MaxStepFrequency", frequency));
						//Lower value of step height gives smoother walking
						// std::cout << "y " << message_.y << std::endl;
						// If the target is roughly straight ahead, walk to it; otherwise rotate toward it first
						if (fabs(message_.y) < 0.10)
						{
							// walk_config.arrayPush(AL::ALValue::array("StepHeight", 0.01));
							// motion_proxy_ptr->post.moveTo(message_.x, 0.0, 0.0, walk_config);
							motion_proxy_ptr->post.moveTo(message_.x, 0.0, 0.0); // post: non-blocking call
							sleep(2.0); // give the non-blocking move time to execute
							//motion_proxy_ptr->post.stopMove();
						}
						else
						{
							// Rotate in place by 0.1 rad toward the side the target is on
							// walk_config.arrayPush(AL::ALValue::array("StepHeight", 0.005));
							// motion_proxy_ptr->post.moveTo(0.0, 0.0, message_.y/fabs(message_.y)*0.1, walk_config);
							motion_proxy_ptr->post.moveTo(0.0, 0.0, message_.y/fabs(message_.y)*0.1);
							//sleep(3.0);
							//motion_proxy_ptr->post.stopMove();
						}
				}

			}
			set_feedback(RUNNING);
			return 0;
		}
void NaoMarkServiceDetection::MoveHead(float angle)
{
	motionProxy->angleInterpolation("HeadYaw", AL::ALValue(angle), AL::ALValue(1.0f), true);
	motionProxy->stiffnessInterpolation("HeadYaw", 1.0f, 1.0f);
}
void NaoMarkServiceDetection::TurnToDetectedMark(float angle)
{
	motionProxy->moveTo(0,0,angle);
	MoveHead(0);
	_isAllowedToMove = true;
}
std::pair<float, float> worldBallPosFromImgCoords(AL::ALMotionProxy motionProxy,
                                                  std::pair<int, int> ballPosCam,
                                                  int imgWidth, int imgHeight,
                                                  int camera)
{
	std::string cameraName = "CameraTop";
	if (camera == 1)
	{
		cameraName = "CameraBottom";
	}

	// Image coordinates of ball
	int u = ballPosCam.first;
	int v = ballPosCam.second;

	// Angles of observation of the ball
	float phi = ((float)u-((float)imgWidth)/2)/(float)imgWidth * img_WFOV;
	float theta = ((float)v-((float)imgHeight)/2)/(float)imgHeight * img_HFOV;

	// Select the right coordinates for the NAO system!
	// x outward from camera, y to the left and z vertically upwards

	// Coefficients for line-equation going from NAO camera to the ball
	float b_c = -sin(phi);
	float c_c = -sin(theta);
	float a_c = sqrt((cos(phi)*cos(phi)+cos(theta)*cos(theta))/2);

	int space = 2; //FRAME_ROBOT
	bool useSensorValues = true;
	std::vector<float> transVec =
		motionProxy.getTransform(cameraName, space, useSensorValues); // Transform camera -> FRAME_ROBOT
	std::vector<float> cameraPos =
		motionProxy.getPosition(cameraName, space, useSensorValues); // Camera position in FRAME_ROBOT


	// std::cout << "Position of bottom camera: " << std::endl;
	// std::cout << cameraPos.at(0) << " " << cameraPos.at(1) << " " << cameraPos.at(2) << std::endl;
	// std::cout << cameraPos.at(3) << " " << cameraPos.at(4) << " " << cameraPos.at(5) << std::endl;


	// Put the camera transform into an Eigen matrix for easy multiplication
	Eigen::Matrix4f trans;
	trans <<
		transVec[0] , transVec[1] , transVec[2] , transVec[3] ,
		transVec[4] , transVec[5] , transVec[6] , transVec[7] ,
		transVec[8] , transVec[9] , transVec[10], transVec[11],
		transVec[12], transVec[13], transVec[14], transVec[15];

	Eigen::Vector4f vec(a_c, b_c, c_c, 1);

	// Transform the line equation from NAO camera coordinate system into FRAME_ROBOT
	Eigen::Vector4f transformedLine = trans*vec;
	// std::cout << "trans*vec = " << transformedLine << std::endl;

	// Solve line-plane intersection with plane at z (floor)
	// Solution from Wikipedia line-plane intersection article
	float z = 0.00;

	// Parametric line-plane formulation: the first column is (la - lb) along the viewing ray,
	// the other two columns are the in-plane edge vectors (p1 - p0) and (p2 - p0) of the floor plane at height z
	Eigen::Matrix3f lineMat;
	lineMat <<
		cameraPos.at(0)-transformedLine[0], 1.0-0.0, 0.0-0.0,
		cameraPos.at(1)-transformedLine[1], 0.0-0.0, 1.0-0.0,
		cameraPos.at(2)-transformedLine[2], z  -  z, z  -  z;

	Eigen::Vector3f lineVec;
	lineVec << cameraPos.at(0)-0.0, cameraPos.at(1)-0.0, cameraPos.at(2)-z;
	Eigen::Vector3f txy = lineMat.inverse()*lineVec; // [t, u, v]; u and v are the floor-plane (x, y) of the intersection
	std::cout << "Ball is at (x, y): (" << txy[1] << ", " << txy[2] << ")" << std::endl;
	std::pair<float, float> return_value(txy[1], txy[2]);
	return return_value; //Return ball position (x, y)
}
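For orientation, a hedged usage sketch of worldBallPosFromImgCoords; the robot address, image size, and pixel coordinates are placeholders, and img_WFOV / img_HFOV are assumed to be defined elsewhere as the camera's horizontal and vertical field of view in radians:

#include <iostream>
#include <utility>
#include <alproxies/almotionproxy.h>

int main()
{
	// Assumptions: robot reachable at 127.0.0.1:9559, a 320x240 image, and a ball detected at pixel (180, 130)
	AL::ALMotionProxy motionProxy("127.0.0.1", 9559);
	std::pair<int, int> ballPixel(180, 130);
	std::pair<float, float> ballXY =
		worldBallPosFromImgCoords(motionProxy, ballPixel, 320, 240, 1); // 1 selects CameraBottom
	std::cout << "Ball in FRAME_ROBOT: x = " << ballXY.first
	          << " m, y = " << ballXY.second << " m" << std::endl;
	return 0;
}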