Code example #1
File: BodyItem.cpp  Project: orikuma/choreonoid
/**
   @return false if the restored state is same as the current state
*/
bool BodyItem::restoreKinematicState(const BodyState& state)
{

    BodyState currentState;
    storeKinematicState(currentState);

    state.getZMP(impl->zmp);
    state.restorePositions(*impl->body);

    // The comparison with the snapshot taken above is disabled, so the
    // function currently always reports a change (returns true).
    //cout << "(currentState == state):" << (currentState == state) << endl;
    //return (currentState == state);
    return true;
}
Code example #2
File: ApplyIMUToState.cpp  Project: OSVR/OSVR-Core
    inline void applyAngVelToState(TrackingSystem const &sys, BodyState &state,
                                   BodyProcessModel &processModel,
                                   CannedIMUMeasurement const &meas) {

        Eigen::Vector3d angVel;
        meas.restoreAngVel(angVel);
        Eigen::Vector3d var;
        meas.restoreAngVelVariance(var);
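        // The #if 0 block below preserves an earlier correction path for reference;
        // the unscented correction in the #else branch is the code that actually runs.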
#if 0
        static const double dt = 0.02;
        /// Rotate it into camera space - it's bTb and we want cTc
        /// @todo do this without rotating into camera space?
        Eigen::Quaterniond cTb = state.getQuaternion();
        // Eigen::Matrix3d bTc(state.getQuaternion().conjugate());
        /// Turn it into an incremental quat to do the space transformation
        Eigen::Quaterniond incrementalQuat =
            cTb * angVelVecToIncRot(angVel, dt) * cTb.conjugate();
        /// Then turn it back into an angular velocity vector
        angVel = incRotToAngVelVec(incrementalQuat, dt);
        // angVel = (getRotationMatrixToCameraSpace(sys) * angVel).eval();
        /// @todo transform variance?

        kalman::AngularVelocityMeasurement<BodyState> kalmanMeas{angVel, var};
        kalman::correct(state, processModel, kalmanMeas);
#else
        kalman::IMUAngVelMeasurement kalmanMeas{angVel, var};
        auto correctionInProgress =
            kalman::beginUnscentedCorrection(state, kalmanMeas);
        auto outputMeas = [&] {
            std::cout << "state: " << state.angularVelocity().transpose()
                      << " and measurement: " << angVel.transpose();
        };
        if (!correctionInProgress.stateCorrectionFinite) {
            std::cout
                << "Non-finite state correction in applying angular velocity: ";
            outputMeas();
            std::cout << "\n";
            return;
        }
        if (!correctionInProgress.finishCorrection(true)) {
            std::cout << "Non-finite error covariance after applying angular "
                         "velocity: ";
            outputMeas();
            std::cout << "\n";
        }
#endif
    }
Code example #3
File: ApplyIMUToState.cpp  Project: 6DB/OSVR-Core
    void applyIMUToState(TrackingSystem const &sys,
                         util::time::TimeValue const &initialTime,
                         BodyState &state, BodyProcessModel &processModel,
                         util::time::TimeValue const &newTime,
                         CannedIMUMeasurement const &meas) {
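        // Predict the filter forward to the measurement time, then apply whichever
        // part of the IMU measurement (orientation or angular velocity) is valid;
        // if neither is valid, just re-normalize the state.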
        if (newTime != initialTime) {
            auto dt = osvrTimeValueDurationSeconds(&newTime, &initialTime);
            kalman::predict(state, processModel, dt);
            state.externalizeRotation();
        }
        if (meas.orientationValid()) {
            applyOriToState(sys, state, processModel, meas);
        } else if (meas.angVelValid()) {
            applyAngVelToState(sys, state, processModel, meas);

        } else {
            // unusually, the measurement is totally invalid. Just normalize and
            // go on.
            state.postCorrect();
        }
    }
Code example #4
void TFCallBack(tf2_msgs::TFMessage msg){
	for(size_t i=0;i<msg.transforms.size();i++){
		if(tracking>=1){
			//Match this transform against each of the 15 tracked body parts of the current user
			for(int i2=0;i2<15;i2++){
				std::ostringstream checking;
				checking<<"/tracker/user_"<<tracking<<"/"<<bodyParts[i2];
				if(msg.transforms[i].child_frame_id==checking.str()){
					stateTracker.updateSkeletonData(i2, msg.transforms[i].transform.translation.x, msg.transforms[i].transform.translation.y, msg.transforms[i].transform.translation.z);
					//std::cout<<"Received good data "<<i2<<std::endl;
				}
			}
		}
		more=(tracking!=-1);
		if(!more)
			justChanged=true;
	}
}
Code example #5
File: ApplyIMUToState.cpp  Project: 6DB/OSVR-Core
    inline void applyAngVelToState(TrackingSystem const &sys, BodyState &state,
                                   BodyProcessModel &processModel,
                                   CannedIMUMeasurement const &meas) {

        Eigen::Vector3d angVel;
        meas.restoreAngVel(angVel);
        Eigen::Vector3d var;
        meas.restoreAngVelVariance(var);

        /// Rotate it into camera space - it's bTb and we want cTc
        /// @todo do this without rotating into camera space?
        Eigen::Quaterniond cTb = state.getQuaternion();
        //Eigen::Matrix3d bTc(state.getQuaternion().conjugate());
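        // Express the body-frame angular velocity as an incremental rotation via the
        // quaternion exponential map, conjugate it into camera space, then map back
        // to a vector with the log map.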
        Eigen::Quaterniond incrementalQuat = cTb * util::quat_exp_map(angVel).exp() *
            cTb.conjugate();
        angVel = util::quat_exp_map(incrementalQuat).ln();
        // angVel = (getRotationMatrixToCameraSpace(sys) * angVel).eval();
        /// @todo transform variance?

        kalman::AngularVelocityMeasurement<BodyState> kalmanMeas{angVel, var};
        kalman::correct(state, processModel, kalmanMeas);
    }
Code example #6
File: test-zebulon.cpp  Project: ahornung/mpc-walkgen
int main() {
  // Logging:
  // --------
  const int nbFile=7;

  std::vector<std::string> name_vec(nbFile);
  name_vec[0]="CoM_X";
  name_vec[1]="CoM_Y";
  name_vec[2]="CoM_Yaw";
  name_vec[3]="CoP_X";
  name_vec[4]="CoP_Y";
  name_vec[5]="BAS_X";
  name_vec[6]="BAS_Y";

  std::vector<std::ofstream*> data_vec(nbFile);
  std::vector<std::ifstream*> ref_vec(nbFile);
  for (int i = 0; i < nbFile; ++i) {
    data_vec[i] = new std::ofstream((name_vec[i]+".data").c_str());
    ref_vec[i] = new std::ifstream((name_vec[i]+".ref").c_str());
  }



  // Create and initialize generator:
  // -------------------------------

  WalkgenAbstract * walk = createWalkgen(CURRENT_QPSOLVERTYPE);

  MPCData mpcData;
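  // Override the default weighting: zero weight on basePositionInt, weight 10 on basePosition.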
  mpcData.weighting.basePositionInt[0]=0;
  mpcData.weighting.basePosition[0]=10;
  walk->init(mpcData);



  EnvData envData;
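  // Define three circular obstacles by radius and (x, y) centre.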
  Eigen::VectorXd obstacleRadius(3);
  Eigen::VectorXd obstaclePositionX(3);
  Eigen::VectorXd obstaclePositionY(3);
  obstacleRadius(0) = 1;
  obstaclePositionX(0) = 2;
  obstaclePositionY(0) = -0.2;
  obstacleRadius(1) = 0.5;
  obstaclePositionX(1) = 4;
  obstaclePositionY(1) = 0.1;
  obstacleRadius(2) = 0.5;
  obstaclePositionX(2) = 5;
  obstaclePositionY(2) = -0.3;
  envData.obstacleRadius = obstacleRadius;
  envData.obstaclePositionX = obstaclePositionX;
  envData.obstaclePositionY = obstaclePositionY;
  envData.nbObstacle = 3;


  // Run:
  // ----

  BodyState state = walk->bodyState(COM);

  state.x(1) += 0;
  state.x(2) += 0;
  walk->bodyState(COM, state);

  double velocity = 0.5;
  walk->velReferenceInGlobalFrame(velocity, 0, 0);
  walk->posReferenceInGlobalFrame(0, 0, 0);
  walk->online(0.0);
  double t = 0;
  walk->online(t);
  MPCSolution result;

  Eigen::VectorXd position(10);
  Eigen::VectorXd zero(10);
  zero.fill(0);

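  // Online MPC loop: 45 s at 1 kHz, with a position reference that moves ahead of
  // the current base position at the commanded velocity.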
  for (t += 0.001; t < 45; t += 0.001){

    for(int i=0; i<10; ++i)
    {
      position(i)=0.16*(i+1)*velocity+result.state_vec[1].baseTrajX_(0);
    }

    walk->posReferenceInGlobalFrame(position, zero, zero);
    result = walk->online(t);
    Eigen::VectorXd basePos;
    walk->QPBasePosition(basePos);
    envData.obstacleLinearizationPointX = basePos.segment(0, 10);
    envData.obstacleLinearizationPointY = basePos.segment(10, 10);
    walk->envData(envData);


    dumpTrajectory(result, data_vec);
  }

  // Reopen the files:
  // -----------------
  std::vector<std::ifstream*> check_vec(nbFile);
  for(int i = 0;i < nbFile; ++i){
    check_vec[i] = new std::ifstream((name_vec[i]+".data").c_str());
  }
  bool success = ((fabs(result.state_vec[2].baseTrajX_(0)-velocity)<1e-4)
      &&(fabs(result.state_vec[2].baseTrajY_(0)-0)<1e-4)
      &&(fabs(result.state_vec[2].CoMTrajYaw_(0)-0)<1e-4));
  for(unsigned i = 0; i < check_vec.size();++i){
    // if the reference file exists, compare with the previous version.
    if (*ref_vec[i]){
      if (!checkFiles(*check_vec[i],*ref_vec[i])){
        //success = false;
      }
    }
    // otherwise, create it
    else{
      copyFile((name_vec[i]+".data"), (name_vec[i]+".ref"));
    }
    check_vec[i]->close();
    ref_vec[i]->close();
  }
  makeScilabFile("data", t);
  makeScilabFile("ref", t);

  for (int i=0; i < nbFile; ++i) {
    delete check_vec[i];
    delete ref_vec[i];
    delete data_vec[i];
  }
  delete walk;
  return (success)?0:1;
}
Code example #7
File: BodyItem.cpp  Project: orikuma/choreonoid
void BodyItem::storeKinematicState(BodyState& state)
{
    state.storePositions(*impl->body);
    state.setZMP(impl->zmp);
}
Code example #8
int main(int argc, char **argv)
{
	//For some odd reason involving the initialization of static non-copyable data, I cannot get GUI to hold its own RenderWindow,
	//so I'm biting the bullet and creating it here in main()... :P
	gui.init();

	//Create my BodyState tracker
	stateTracker=BodyState();

	//Set up some ROS stuff
	ros::init(argc, argv, "mirror");
	ros::NodeHandle n;

	//First my subscribers
	ros::Subscriber myoIMU_listener = n.subscribe("/myo/imu",1000, myoIMUCallBack);
	ros::Subscriber myoGesture_listener = n.subscribe("/myo/gesture",1000, myoGestureCallBack);
	ros::Subscriber myoOnboardGesture_listener = n.subscribe("/myo/onboardGesture",1000, myoOnboardGestureCallBack);
	ros::Subscriber left_eye_watcher = n.subscribe("/multisense/left/image_rect/compressed",1000, leftEyeCallBack);
	ros::Subscriber right_eye_watcher = n.subscribe("/multisense/right/image_rect/compressed",1000, rightEyeCallBack);
	ros::Subscriber joint_listener = n.subscribe("/ihmc_ros/valkyrie/output/joint_states", 1000, jointStateCallBack);
	ros::Subscriber transform_listener = n.subscribe("/tf", 1000, TFCallBack);
	ros::Subscriber people_listener = n.subscribe("/people", 1000, peopleCallBack);

	//Now my publishers
	ros::Publisher arm_controller = n.advertise<ihmc_msgs::ArmTrajectoryRosMessage>("/ihmc_ros/valkyrie/control/arm_trajectory", 1000);
	ros::Publisher neck_controller = n.advertise<ihmc_msgs::NeckTrajectoryRosMessage>("/ihmc_ros/valkyrie/control/neck_trajectory", 1000);
	ros::Publisher pelvis_height_controller = n.advertise<ihmc_msgs::PelvisHeightTrajectoryRosMessage>("/ihmc_ros/valkyrie/control/pelvis_height_trajectory", 1000);
	ros::Publisher chest_controller = n.advertise<ihmc_msgs::ChestTrajectoryRosMessage>("/ihmc_ros/valkyrie/control/chest_trajectory", 1000);
	ros::Publisher go_home_pub = n.advertise<ihmc_msgs::GoHomeRosMessage>("/ihmc_ros/valkyrie/control/go_home",1000);

	//Some SFML stuff
	sf::RenderWindow window;
	if(gui.isOcculusOn){
		//Create the window on the outer variable; declaring a new local here would
		//shadow it and destroy the window as soon as this block ends.
		window.create(sf::VideoMode(2248, 544), "SFML works!");
	}
	//Now the primary loop
	ros::Rate loop_rate(6);
	ihmc_msgs::GoHomeRosMessage goHomeMsg;
	goHomeMsg.trajectory_time=3;
	goHomeMsg.unique_id=14;
	long int end = 0;	//time (ms) at which the previous full iteration finished
	while (ros::ok()){
		struct timeval tp;
		gettimeofday(&tp, NULL);
		long int start = tp.tv_sec * 1000 + tp.tv_usec / 1000;
		//std::cout<<tracking<<std::endl;
		//Some more SFML stuff
		if(GUIwindow.isOpen()){
			//TODO:add in some human data gathering and print out human position and robot position data
			//Also, add a table for Myo stuff
			pollEventsGUI();
			updateGUI();
		}
		//std::cout<<lastLeftEye.height<<" "<<lastLeftEye.width<<" "<<lastLeftEye.encoding<<" "<<lastLeftEye.step<<" "<<lastLeftEye.data.size()<<" "<<1024*544*3<<std::endl;
		if(gui.isOcculusOn && window.isOpen() && lastLeftEye.data.size()!=0 && lastRightEye.data.size()!=0){
			sf::Event event;
			while (window.pollEvent(event))
			{
				if (event.type == sf::Event::Closed)
					window.close();
			}

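			//Stitch the vertically-flipped left and right eye images side by side,
			//separated by a 200-pixel black strip, into one 2248x544 RGBA buffer.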
			sf::Uint8* pixels = new sf::Uint8[2248 * 544 * 4];
			sf::Image image,imageL,imageR;
			sf::Uint8* Ldata=new sf::Uint8[(size_t)lastLeftEye.data.size()];
			sf::Uint8* Rdata=new sf::Uint8[(size_t)lastRightEye.data.size()];
			for(size_t i=0;i<lastLeftEye.data.size();i++){
				Ldata[i]=lastLeftEye.data[i];
			}
			for(size_t i=0;i<lastRightEye.data.size();i++){
				Rdata[i]=lastRightEye.data[i];
			}
			imageL.loadFromMemory(Ldata,(size_t)lastLeftEye.data.size());
			imageR.loadFromMemory(Rdata,(size_t)lastRightEye.data.size());
			delete[] Ldata;
			delete[] Rdata;
			if(imageL.getSize().x!=imageR.getSize().x || imageL.getSize().y!=imageR.getSize().y)
				std::cout<<"imageL:("<<imageL.getSize().x<<","<<imageL.getSize().y<<") imageR:("<<imageR.getSize().x<<","<<imageR.getSize().y<<")"<<std::endl;
			sf::Texture texture;
			int max_x=imageL.getSize().x;
			int max_y=imageL.getSize().y;
			for(int y = 0; y <max_y; y++)
			{
				//Left eye on the left; (max_y-1-y) flips the source image vertically
				for(int x = 0; x < max_x; x++)
				{
					pixels[(y*(2*max_x+200)+x)*4]     = imageL.getPixelsPtr()[((max_y-1-y)*max_x+x)*4]; // R
					pixels[(y*(2*max_x+200)+x)*4 + 1] = imageL.getPixelsPtr()[((max_y-1-y)*max_x+x)*4+1]; // G
					pixels[(y*(2*max_x+200)+x)*4 + 2] = imageL.getPixelsPtr()[((max_y-1-y)*max_x+x)*4+2]; // B
					pixels[(y*(2*max_x+200)+x)*4 + 3] = 255; // A
				}
				//200-pixel black strip between the two eyes
				for(int x=max_x;x<max_x+200;x++){
					pixels[(y*(2*max_x+200)+x)*4]     = 0; // R
					pixels[(y*(2*max_x+200)+x)*4 + 1] = 0; // G
					pixels[(y*(2*max_x+200)+x)*4 + 2] = 0; // B
					pixels[(y*(2*max_x+200)+x)*4 + 3] = 255; // A
				}
				//Right eye on the right
				for(int x = (max_x+200); x < (2*max_x+200); x++)
				{
					pixels[(y*(2*max_x+200)+x)*4]     = imageR.getPixelsPtr()[((max_y-1-y)*max_x+x-(max_x+200))*4]; // R
					pixels[(y*(2*max_x+200)+x)*4 + 1] = imageR.getPixelsPtr()[((max_y-1-y)*max_x+x-(max_x+200))*4+1]; // G
					pixels[(y*(2*max_x+200)+x)*4 + 2] = imageR.getPixelsPtr()[((max_y-1-y)*max_x+x-(max_x+200))*4+2]; // B
					pixels[(y*(2*max_x+200)+x)*4 + 3] = 255; // A
				}
			}
			window.clear();
			image.create(2248, 544, pixels);
			texture.create(2248, 544);
			texture.update(image);
			sf::Sprite sprite;
			sprite.setTexture(texture);
			window.draw(sprite);
			window.display();
			delete[] pixels;
		}
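		//goHomeCount==12 triggers a staged go-home sequence: left arm, right arm,
		//torso, then pelvis, pausing for six loop cycles between each command.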
		if(gui.goHomeCount==12){
			goHomeMsg.body_part=0;
			goHomeMsg.robot_side=0;
			go_home_pub.publish(goHomeMsg);
			std::cout<<"just published goHome LEFT_ARM"<<std::endl;
			for(int i=0;i<6;i++){
				ros::spinOnce();
				loop_rate.sleep();
			}
			goHomeMsg.robot_side=1;
			go_home_pub.publish(goHomeMsg);
			std::cout<<"just published goHome RIGHT_ARM"<<std::endl;
			for(int i=0;i<6;i++){
				ros::spinOnce();
				loop_rate.sleep();
			}
			goHomeMsg.body_part=1;
			go_home_pub.publish(goHomeMsg);
			std::cout<<"just published goHome TORSO"<<std::endl;
			for(int i=0;i<6;i++){
				ros::spinOnce();
				loop_rate.sleep();
			}
			goHomeMsg.body_part=2;
			go_home_pub.publish(goHomeMsg);
			std::cout<<"just published goHome PELVIS"<<std::endl;
			gui.startGoingHome=false;
			gui.goHomeCount--;
			ros::spinOnce();
			loop_rate.sleep();
			continue;
			/*
			moveSpeedOverRide=3;
			arm_controller.publish(moveArm(JRIGHT,stateTracker.getArmHomeAngles()));
			arm_controller.publish(moveArm(JLEFT,stateTracker.getArmHomeAngles()));
			chest_controller.publish(moveChest(stateTracker.getChestHomeAngles()));
			neck_controller.publish(moveHead(stateTracker.getNeckHomeAngles()));
			pelvis_height_controller.publish(movePelvisHeight(stateTracker.getPelvisHeightHomeAngles()));
			moveSpeedOverRide=-1;*/
		}
		if(!more || justChanged || (gui.goHomeCount>0 && gui.goHomeCount<12)){
			//std::cout<<gui.goHomeCount<<" "<<more<<" "<<justChanged<<std::endl;
			if(justChanged && more)
				justChanged=false;
			/*
			ihmc_msgs::GoHomeRosMessage msg;
			msg.trajectory_time=3;
			msg.unique_id=5;
			go_home_pub.publish(msg);
			msg.robot_side=1;
			go_home_pub.publish(msg);*/
			if(gui.goHomeCount>0)
				gui.goHomeCount--;
			ros::spinOnce();
			loop_rate.sleep();
			continue;
		}
		std::vector<double> out;
		out.push_back(start-end);	//milliseconds since the last completed iteration ended
		if(end!=0)
			gui.record("*: ",out);

		//gui.gesture==0 acts as an unlock feature
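		//(the "&& false" branches below are intentionally-disabled return-to-home paths)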
		if(gui.isRightArmOn && gui.gesture==1)
			arm_controller.publish(moveArm(JRIGHT,stateTracker.getRightArmAngles()));
		else if(gui.isRightArmOn && gui.gesture!=1 && false){
			moveSpeedOverRide=3;
			arm_controller.publish(moveArm(JRIGHT,stateTracker.getArmHomeAngles()));
			moveSpeedOverRide=-1;
			moveArm(JRIGHT,stateTracker.getRightArmAngles(),false);
		}

		if(gui.isLeftArmOn && gui.gesture==1)
			arm_controller.publish(moveArm(JLEFT,stateTracker.getLeftArmAngles()));
		else if(gui.isLeftArmOn && gui.gesture!=1 && false){
			moveSpeedOverRide=3;
			arm_controller.publish(moveArm(JLEFT,stateTracker.getArmHomeAngles()));
			moveSpeedOverRide=-1;
			//moveArm(JLEFT,stateTracker.getLeftArmAngles(),false);
		}
		if(gui.isChestOn && gui.gesture==1)
			chest_controller.publish(moveChest(stateTracker.getChestAngles()));
		else if(gui.isChestOn && gui.gesture!=1 && false){
			moveSpeedOverRide=3;
			chest_controller.publish(moveChest(stateTracker.getChestHomeAngles()));
			moveSpeedOverRide=-1;
			//moveChest(stateTracker.getChestAngles(),false);
		}
		if(gui.isHeadOn && gui.gesture==1)
			neck_controller.publish(moveHead(stateTracker.getNeckAngles()));
		else if(gui.isHeadOn && gui.gesture!=1 && false){
			moveSpeedOverRide=3;
			neck_controller.publish(moveHead(stateTracker.getNeckHomeAngles()));
			moveSpeedOverRide=-1;
			//moveHead(stateTracker.getNeckAngles(),false);
		}
		if(gui.isPelvisOn && gui.gesture==1)
			pelvis_height_controller.publish(movePelvisHeight(stateTracker.getStandingHeight()));
		else if(gui.isPelvisOn && gui.gesture!=1 && false){
			moveSpeedOverRide=3;
			pelvis_height_controller.publish(movePelvisHeight(stateTracker.getPelvisHeightHomeAngles()));
			moveSpeedOverRide=-1;
			//movePelvisHeight(stateTracker.getStandingHeight(),false);
		}

		ros::spinOnce();
		gettimeofday(&tp, NULL);
		long int start2 = tp.tv_sec * 1000 + tp.tv_usec / 1000;
		loop_rate.sleep();
		gettimeofday(&tp, NULL);
		end = tp.tv_sec * 1000 + tp.tv_usec / 1000;
		std::cout<<end-start2<<std::endl;
	}
	go_home_pub.publish(goHomeMsg);
	goHomeMsg.robot_side=1;
	go_home_pub.publish(goHomeMsg);
	return 0;
}