/*
  Modify this implementation to pick GREEN_BOX_0 and place it in the GREEN_CONTAINER_0, grasp RED_BOX_0 and place it in the RED_CONTAINER_0

  Hint: Use the 'pick' and 'place' functions you implemented.

*/
int main(int argc, char** argv)
{
    try
    {
        ros::init(argc, argv, "demo_cogsys_viz_cpp");

        cv::Mat demoImg(480,640, CV_8UC3);
        cv::namedWindow("demo", 1);

        demoImg=cv::Scalar(0,0,0);

        /* Help text. Each line gets its own y offset; previously the last four
           lines were all drawn at y=70 and overwrote each other. */
        cv::putText(demoImg, "Press ESC to quit app", cv::Point(30,30), cv::FONT_HERSHEY_COMPLEX_SMALL, 0.8, cv::Scalar(0,255,0), 1, CV_AA);
        cv::putText(demoImg, "Press g to pick GREEN ball and place it in GREEN container", cv::Point(30,70), cv::FONT_HERSHEY_COMPLEX_SMALL, 0.8, cv::Scalar(0,255,0), 1, CV_AA);
        cv::putText(demoImg, "Press r to pick RED ball and place it in RED container", cv::Point(30,100), cv::FONT_HERSHEY_COMPLEX_SMALL, 0.8, cv::Scalar(0,255,0), 1, CV_AA);
        cv::putText(demoImg, "Press p to pick GREEN ball", cv::Point(30,130), cv::FONT_HERSHEY_COMPLEX_SMALL, 0.8, cv::Scalar(0,255,0), 1, CV_AA);
        cv::putText(demoImg, "Press m to turn on mic", cv::Point(30,160), cv::FONT_HERSHEY_COMPLEX_SMALL, 0.8, cv::Scalar(0,255,0), 1, CV_AA);

        int k;

        ros::NodeHandle n;

        /* Service client used to query object poses from the world model. */
        world_model_client = n.serviceClient<world_model_srvs::GetTransform>("/world_model/get_transform");

        /* Clients for moving both robots to a given cartesian pose and for
           actuating the gripper. */
        gripperbot_client = n.serviceClient<robot_control_srvs::MoveToCS>("/gripperbot_control/move_to_cs");
        cambot_client = n.serviceClient<robot_control_srvs::MoveToCS>("/cambot_control/move_to_cs");
        opengripper_client = n.serviceClient<gripper_control_srvs::OpenGripper>("/gripper_control/open_gripper");
        closegripper_client = n.serviceClient<gripper_control_srvs::CloseGripper>("/gripper_control/close_gripper");

        /* Speech recognition: callback plus on/off switches for the mic. */
        sub_speech = n.subscribe("/speech_recognition/output", 100, analyzeSpeech);
        mic_on_client = n.serviceClient<speech_recognition_srvs::TurnOn>("/speech_recognition/turn_on");
        mic_off_client = n.serviceClient<speech_recognition_srvs::TurnOff>("/speech_recognition/turn_off");

        /* Move the cambot to the position, where the bag file was recorded from. */
        movecambot(0.15,0,0.45);

        tf::TransformListener listener;

        bool pickPlaceGreen = false;
        bool pickPlaceRed = false;
        bool graspObject = false;

        while(ros::ok())
        {
            if(graspObject)
            {
                std::string type="BOX";
                std::string prop="GREEN";
                int id=0;
                std::stringstream ss_object_name;
                ss_object_name << prop << "_" << type << "_" << id;

                tf::StampedTransform stamped_transform;
                try{
                    /* Get the pose of the object w.r.t. the gripperbot_base frame. */
                    listener.waitForTransform(ss_object_name.str(), "gripperbot_base",
                                              ros::Time(0), ros::Duration(1.0));
                    listener.lookupTransform(ss_object_name.str(), "gripperbot_base",
                                             ros::Time(0), stamped_transform);
                    float x,y,z;
                    x = stamped_transform.getOrigin().getX();
                    y = stamped_transform.getOrigin().getY();
                    z = stamped_transform.getOrigin().getZ();
                    /* Reject invalid/unreachable/noisy poses estimated from the camera. */
                    if(z < 0) z = -z;
                    if(x < 0) continue;
                    /* Move the gripperbot using the obtained object position. */
                    grasp(x,y,z);
                }
                catch (tf::TransformException ex){
                    std::cout << "NO " << prop  << " " << type << "found to grasp" << std::endl;
                    ros::Duration(1.0).sleep();
                }
                graspObject = false;
            }
            /* 'g': pick/place GREEN, then chain once into the RED pick/place
               (implements the green-then-red sequence described at the top). */
            if(pickPlaceGreen)
            {
                pick("GREEN_BOX_0");
                place("GREEN_CONTAINER_0");
                pickPlaceGreen = false;
                pickPlaceRed = true;
            }
            /* 'r' (or chained from green): pick/place RED exactly once.
               BUGFIX: previously this block re-armed pickPlaceGreen, which made
               green and red alternate forever after a single keypress. */
            if(pickPlaceRed)
            {
                pick("RED_BOX_0");
                place("RED_CONTAINER_0");
                pickPlaceRed = false;
            }

            imshow("demo",  demoImg);

            k = cv::waitKey(100);

            if((int)(k & 0xFF) == 27)
                break;

            if((int)(k & 0xFF) == 'p')
                graspObject=true;

            if((int)(k & 0xFF) == 'g')
                pickPlaceGreen=true;

            if((int)(k & 0xFF) == 'r')
                pickPlaceRed=true;

            if((int)(k & 0xFF) == 'm')
                turn_mic_on();

            ros::spinOnce();
        }

        demoImg.release();

        /* BUGFIX: the window is named "demo"; destroying "demoImg" was a no-op
           that left the real window open. */
        cv::destroyWindow("demo");

        cambot_client.shutdown();
        gripperbot_client.shutdown();
        opengripper_client.shutdown();
        closegripper_client.shutdown();
        world_model_client.shutdown();
        mic_on_client.shutdown();
        mic_off_client.shutdown();

        /* BUGFIX: exit codes were inverted (1 on success, 0 on error). */
        return 0;

    }
    catch (std::exception& e)
    {
        std::cout << e.what() << std::endl;
    }

    return 1;
};
// ===== Example #2 =====
 /// Test-fixture teardown: releases the service server so the node name is
 /// free for the next test case. (Method body of an enclosing fixture class
 /// not visible in this chunk.)
 void TearDown()
 {
   load_srv_.shutdown();
 }
// ===== Example #3 =====
int main(int argc, char** argv)
{
    try
    {
        ros::init(argc, argv, "demo_cogsys_cpp");

        cv::Mat demoImg(480,640, CV_8UC3);
        cv::namedWindow("demo", 1);

        demoImg=cv::Scalar(0,0,0);

        /* Static help overlay shown for the whole session. */
        cv::putText(demoImg, "Press ESC to quit app", cv::Point(30,30), cv::FONT_HERSHEY_COMPLEX_SMALL, 0.8, cv::Scalar(0,255,0), 1, CV_AA);
        cv::putText(demoImg, "Press g to grasp GREEN object", cv::Point(30,70), cv::FONT_HERSHEY_COMPLEX_SMALL, 0.8, cv::Scalar(0,255,0), 1, CV_AA);
        /* BUGFIX: user-facing typo "activation login" -> "activation logic". */
        cv::putText(demoImg, "Please implement your own activation logic for grasping", cv::Point(30,100), cv::FONT_HERSHEY_COMPLEX_SMALL, 0.8, cv::Scalar(0,255,0), 1, CV_AA);
        cv::putText(demoImg, "using natural language (pressing 'g' is only for testing)", cv::Point(30,130), cv::FONT_HERSHEY_COMPLEX_SMALL, 0.8, cv::Scalar(0,255,0), 1, CV_AA);
        cv::putText(demoImg, "Press m to activate microphone", cv::Point(30,160), cv::FONT_HERSHEY_COMPLEX_SMALL, 0.8, cv::Scalar(0,255,0), 1, CV_AA);

        /* Background subscriber thread delivering detected-object data. */
        mSub = new comminterface::ActorsSubscriber("/object_detector/objects_data");
        mSub->start();

        int k;

        ros::NodeHandle n;

        /* NOTE(review): template type MoveToOS does not match the service name
           "move_to_cs" (the companion demo uses MoveToCS here) — confirm which
           one the gripperbot control node actually advertises. */
        gripperbot_client = n.serviceClient<robot_control_srvs::MoveToOS>("/gripperbot_control/move_to_cs");
        opengripper_client = n.serviceClient<gripper_control_srvs::OpenGripper>("/gripper_control/open_gripper");
        closegripper_client = n.serviceClient<gripper_control_srvs::CloseGripper>("/gripper_control/close_gripper");

        sub_speech = n.subscribe("/re_speech_recognition/output", 1, analyzeSpeech);
        mic_on_client = n.serviceClient<speech_recognition_srvs::TurnOn>("/re_speech_recognition/turn_on");
        mic_off_client = n.serviceClient<speech_recognition_srvs::TurnOff>("/re_speech_recognition/turn_off");

        /* Move the gripperbot to its home pose before accepting commands. */
        home();

        bool graspObject = false;
        /* BUGFIX: loop on ros::ok() so Ctrl-C / node shutdown exits the UI. */
        while(ros::ok())
        {
            if(graspObject)
            {
                grasp("BOX", "GREEN");
                graspObject=false;
            }

            imshow("demo",  demoImg);

            k = cv::waitKey(100);

            if((int)(k & 0xFF) == 27)
                break;

            if((int)(k & 0xFF) == 'g')
                graspObject=true;

            if((int)(k & 0xFF) == 'm')
                turn_mic_on();

            /* BUGFIX: without spinning, the sub_speech subscription is never
               serviced and analyzeSpeech can never be invoked. */
            ros::spinOnce();
        }

        demoImg.release();

        mSub->stopThread();

        /* BUGFIX: the window is named "demo"; destroying "demoImg" was a no-op. */
        cv::destroyWindow("demo");

        gripperbot_client.shutdown();
        opengripper_client.shutdown();
        closegripper_client.shutdown();
        mic_on_client.shutdown();
        mic_off_client.shutdown();
        delete mSub;

        /* BUGFIX: exit codes were inverted (1 on success, 0 on error). */
        return 0;

    }
    catch (std::exception& e)
    {
        std::cout << e.what() << std::endl;
    }

    return 1;
};