void MainWindow::mouseMoved(int x, int y)
{
	// Cursor displacement since the previous mouse event, in pixels.
	const int dx = x - m_mouseX;
	const int dy = y - m_mouseY;

	// Dispatch the motion to the camera action bound to the pressed button.
	if (m_button == GLUT_LEFT_BUTTON)
	{
		moveXY(dx, dy);
	}

	if (m_button == GLUT_RIGHT_BUTTON)
	{
		moveHead(dx, dy);
	}

	if (m_button == GLUT_MIDDLE_BUTTON)
	{
		moveZ(dy);
	}

	// Re-apply the camera transformation to the viewport.
	m_cam.apply();

	// Remember the cursor position for the next event.
	m_mouseX = x;
	m_mouseY = y;
}
// Drives the base back to the stored original pose: levels the head first,
// then publishes the base goal and waits until the path lock is released.
void Controller::returnToOriginalPosition()
{
	moveHead(0.0, 0.0);

	// Take the path lock, send the goal, and block until pathing completes.
	mLock = LOCK_PATH;
	moveBase(mOriginalPosition.pose);
	waitForLock();
}
Exemple #3
0
// Sends all-zero targets to both arms, both legs and the head, clearing the
// pending effect text first. Always reports success (returns true).
bool AngularMotor::allJointsOff(){
	// Discard any queued effect description before zeroing the joints.
	effect.str("");
	moveArm(right_side, 0, 0, 0, 0);
	moveArm(left_side, 0, 0, 0, 0);
	moveLeg(right_side, 0, 0, 0, 0, 0, 0);
	moveLeg(left_side, 0, 0, 0, 0, 0, 0);
	moveHead(0, 0);
	return true;
}
Exemple #4
0
/*******************************************************************************
 * Description: moves the snake one step, based on the input direction.
 *              A direction of 4 or -1 means "keep going the same way".
 * 
 * Inputs: int direction
 * 
 * Returns: void
 ******************************************************************************/
void advanceSnake( int direction )
{
  if( direction == 4 || direction == -1 )
  { // continue the same direction as we are going
    direction = PREV_DIRECTION;
  }
  else 
  {
    PREV_DIRECTION = direction;
  }

  /* clear the snake from the screen */
  int i;
  for( i = 0; i < theSnake.length; i++) 
  {
    mvaddch( theSnake.body[i].y,
             theSnake.body[i].x,
             EMPTY_CHAR );
  }

  /* remember the tail so it can be re-appended if a goodie was eaten */
  struct unit lastTail = theSnake.body[theSnake.length-1];

  /* shift the body toward the tail in place, iterating back to front.
   * (The original copied the whole snake into a MAX_LENGTH temp array and
   * copied it back — the backward in-place shift needs no extra storage.) */
  for( i = theSnake.length - 1; i > 0; i-- )
  {
    theSnake.body[i] = theSnake.body[i-1];
  }

  /* move the head to the corresponding location */
  moveHead( direction );

  /* draw the new snake. draw it backwards so the head is always on top */
  for( i = 1; i < theSnake.length; i++ )
  {
    mvaddch( theSnake.body[i].y,
             theSnake.body[i].x,
             TAIL_CHAR );
  }
  mvaddch( theSnake.body[0].y, theSnake.body[0].x, theSnake.body[0].symbol );

  if( gotGoodie )
  { /* append another tail to the snake */
    theSnake.length++;
    theSnake.body[theSnake.length-1] = lastTail;
    mvaddch( lastTail.y, lastTail.x, lastTail.symbol);
    placeGoodie(); // place a new goodie
    score++;
    mvprintw( MAX_ROW-1, 0, "Score: %d", score );
    gotGoodie = false;
  }
}
/**
 * Respond to the user surprised
 *
 * Resets the head to its initial pose and re-enables face tracking, then
 * reports the SURPRISED emotion. The busy flag is raised only for the
 * duration of the head command.
 */
uint8_t Controller::respond()
{
	mBusy = true;
	moveHead(HEAD_INIT_X, HEAD_INIT_Z);
	// Let the head follow the user's face again.
	setFocusFace(true);

	mBusy = false;
	// Remember that the surprise response has already been given.
	respondedSurprised = true;
	return nero_msgs::Emotion::SURPRISED;
}
// Selects a movement routine per room: each known room maps to a fixed
// combination of left-arm / right-arm / head / body task numbers; anything
// unrecognised leaves every task at 0.
void UserController::choice_movement(std::string room)
{
	// All task selectors default to 0; only non-zero tasks are set below.
	int task_right = 0;
	int task_left = 0;
	int task_head = 0;
	int task_body = 0;

	if (room == "lobby")
	{
		task_right = 1;
		task_head = 2;
	}
	else if (room == "kitchen")
	{
		task_left = 2;
		task_head = 3;
	}
	else if (room == "living room")
	{
		task_left = 1;
		task_head = 4;
	}
	else if (room == "bed room")
	{
		task_right = 2;
		task_head = 1;
	}
	else if (room == "finish")
	{
		task_body = 1;
	}

	// Hand the selected task numbers to the individual movers.
	moveLeftArm(task_left);
	moveRightArm(task_right);
	moveHead(task_head);
	moveBody(task_body);
}
Exemple #7
0
/**
 * Fetches up to `size` bytes from the circular buffer into `dst`.
 *
 * Handles the wrap-around case: when the readable region wraps past the end
 * of storage (head > tail) it is copied in two chunks — head..end first,
 * then begin..tail. The buffer mutex is held for the whole operation.
 *
 * @param buffer the circular buffer instance
 * @param dst    destination to copy into
 * @param size   maximum number of bytes to fetch
 * @return number of bytes actually copied (0 when the buffer is empty)
 */
int CBget(CB_INSTANCE* buffer, CB_BYTE_PTR dst, int size) {
	pthread_mutex_lock(&buffer->mutex);
	int sizeFetch = 0;

	/* head == tail means the buffer holds no data */
	if(buffer->head == buffer->tail) {
		pthread_mutex_unlock(&buffer->mutex);
		return sizeFetch;
	}

	if(buffer->head > buffer->tail) {
		/* readable data wraps: first copy the chunk up to the end of storage */
		int sizeA = buffer->end - buffer->head;
		sizeA = size < sizeA ? size : sizeA;

		memcpy(dst, buffer->head, sizeA);
		dst += sizeA;
		moveHead(buffer, sizeA);
		size -= sizeA;
		sizeFetch += sizeA;

		/* the first chunk may already have drained the buffer */
		if(buffer->head == buffer->tail) {
			pthread_mutex_unlock(&buffer->mutex);
			return sizeFetch;
		}
	}

	if(size) {
		/* linear region: head is now at or below tail */
		int sizeB = buffer->tail - buffer->head;
		sizeB = size < sizeB ? size : sizeB;

		memcpy(dst, buffer->head, sizeB);
		moveHead(buffer, sizeB);
		sizeFetch += sizeB;
	}

#ifdef __CB_LOG
	/* BUGFIX: pointers must be printed with %p — passing them to %x is
	 * undefined behavior and truncates addresses on 64-bit platforms. */
	printf("%p : GET %p - %p\n", (void*)buffer->begin, (void*)buffer->head, (void*)buffer->tail);
#endif
	pthread_mutex_unlock(&buffer->mutex);
	return sizeFetch;
}
/**
 * Moves the base to a given goal
 *
 * Unlocks the base controller, levels the head (and disables face focus),
 * then publishes the goal as a stamped pose in the /map frame.
 */
void Controller::moveBase(geometry_msgs::Pose &goal)
{
	//Make sure that setMode() for base is unlocked
	std_msgs::Bool bool_msg;
	bool_msg.data = false;
	stopPublisher->publish(bool_msg);

	// Stop tracking faces and point the head straight ahead before driving.
	setFocusFace(false);
	moveHead(0.0, 0.0);

	ROS_INFO("Publishing path goal.");
	// Stamp the goal with the map frame and current time before publishing.
	geometry_msgs::PoseStamped stamped_goal;
	stamped_goal.pose = goal;
	stamped_goal.header.frame_id = "/map";
	stamped_goal.header.stamp = ros::Time::now();
	mBaseGoalPublisher.publish(stamped_goal);
}
Exemple #9
0
/*
 * Returns TRUE when a move with the same source and destination stacks as
 * theMove already appears in theMoveInfo's move list, FALSE otherwise.
 */
int moveInfoContainsMove( MoveInfo *theMoveInfo, Move *theMove )
{
    if( IsListEmpty( &theMoveInfo->mi_Moves ))
        return FALSE;

    /* one-past-the-last node: the successor of the list tail */
    Move *endMark = moveSucc( moveTail( &theMoveInfo->mi_Moves ));
    Move *current = moveHead( &theMoveInfo->mi_Moves );

    while( current != endMark )
    {
        if(( theMove->mv_SourceStack == current->mv_SourceStack )&&
           ( theMove->mv_DestStack == current->mv_DestStack ))
            return TRUE;

        current = moveSucc( current );
    }

    return FALSE;
}
Exemple #10
0
// Runs the gaze motion for the requested parts (head / left eye / right eye).
// A target must have been set beforehand; it is consumed by this call. The
// result packs the per-part outcome as 1 + eyeFailed(0|1) + 2*headFailed(0|1).
IK_GazeResult::List Gaze::execute(bool move_head, bool move_leye, bool move_reye, bool simulate)
{
	if(!m_targetIsSet)
		return IK_GazeResult::GAZE_ERROR;

	int headFailed = 0;
	int eyesFailed = 0;

	if(move_head && !moveHead(simulate))
		headFailed = 1;

	if(move_leye && !moveEye(m_leye, m_aux_leye, simulate))
		eyesFailed = 1;

	if(move_reye && !moveEye(m_reye, m_aux_reye, simulate))
		eyesFailed = 1;

	// The target is one-shot: it must be set again before the next call.
	m_targetIsSet = false;
	return (IK_GazeResult::List)(1 + eyesFailed + 2*headFailed);
}
int main(int argc, char **argv)
{
	//So, for some odd reason involving the initialization of static non-copyable data, i can not get GUI to hold it's own RenderWindow
	//So i'm going to bite the bullet and just do it here in this class... :P
	gui.init();

	//Create my BodyState tracker
	stateTracker=BodyState();

	//Set up some ROS stuff
	ros::init(argc, argv, "mirror");
	ros::NodeHandle n;

	//First my subscribers
	ros::Subscriber myoIMU_listener = n.subscribe("/myo/imu",1000, myoIMUCallBack);
	ros::Subscriber myoGesture_listener = n.subscribe("/myo/gesture",1000, myoGestureCallBack);
	ros::Subscriber myoOnboardGesture_listener = n.subscribe("/myo/onboardGesture",1000, myoOnboardGestureCallBack);
	ros::Subscriber left_eye_watcher = n.subscribe("/multisense/left/image_rect/compressed",1000, leftEyeCallBack);
	ros::Subscriber right_eye_watcher = n.subscribe("/multisense/right/image_rect/compressed",1000, rightEyeCallBack);
	ros::Subscriber joint_listener = n.subscribe("/ihmc_ros/valkyrie/output/joint_states", 1000, jointStateCallBack);
	ros::Subscriber transform_listener = n.subscribe("/tf", 1000, TFCallBack);
	ros::Subscriber people_listener = n.subscribe("/people", 1000, peopleCallBack);

	//Now my publishers
	ros::Publisher arm_controller = n.advertise<ihmc_msgs::ArmTrajectoryRosMessage>("/ihmc_ros/valkyrie/control/arm_trajectory", 1000);
	ros::Publisher neck_controller = n.advertise<ihmc_msgs::NeckTrajectoryRosMessage>("/ihmc_ros/valkyrie/control/neck_trajectory", 1000);
	ros::Publisher pelvis_height_controller = n.advertise<ihmc_msgs::PelvisHeightTrajectoryRosMessage>("/ihmc_ros/valkyrie/control/pelvis_height_trajectory", 1000);
	ros::Publisher chest_controller = n.advertise<ihmc_msgs::ChestTrajectoryRosMessage>("/ihmc_ros/valkyrie/control/chest_trajectory", 1000);
	ros::Publisher go_home_pub = n.advertise<ihmc_msgs::GoHomeRosMessage>("/ihmc_ros/valkyrie/control/go_home",1000);

	//Some SFML stufsf::RenderWindow windowf
	sf::RenderWindow window;
	if(gui.isOcculusOn){
		sf::RenderWindow window(sf::VideoMode(2248, 544), "SFML works!");
		sf::CircleShape shape(100.f);
		shape.setFillColor(sf::Color::Green);
	}
	//Now the primary loop
	ros::Rate loop_rate(6);
	ihmc_msgs::GoHomeRosMessage goHomeMsg;
	goHomeMsg.trajectory_time=3;
	goHomeMsg.unique_id=14;
	long int end, start;
	while (ros::ok()){
		struct timeval tp;
		gettimeofday(&tp, NULL);
		long int start = tp.tv_sec * 1000 + tp.tv_usec / 1000;
		//std::cout<<tracking<<td::endl;
		//Some more SFML stuff
		if(GUIwindow.isOpen()){
			//TODO:add in some human data gathering and print out human position and robot position data
			//Also, add a table for Myo stuff
			pollEventsGUI();
			updateGUI();
		}
		//std::cout<<lastLeftEye.height<<" "<<lastLeftEye.width<<" "<<lastLeftEye.encoding<<" "<<lastLeftEye.step<<" "<<lastLeftEye.data.size()<<" "<<1024*544*3<<std::endl;
		if(gui.isOcculusOn && window.isOpen() && lastLeftEye.data.size()!=0 && lastRightEye.data.size()!=0){
			sf::Event event;
			while (window.pollEvent(event))
			{
				if (event.type == sf::Event::Closed)
					window.close();
			}

			sf::Uint8* pixels = new sf::Uint8[2248 * 544 * 4];
			sf::Image image,imageL,imageR;
			sf::Uint8* Ldata=new sf::Uint8[(size_t)lastLeftEye.data.size()];
			sf::Uint8* Rdata=new sf::Uint8[(size_t)lastRightEye.data.size()];
			for(int i=0;i<(size_t)lastLeftEye.data.size();i++){
				Ldata[i]=lastLeftEye.data[i];
			}
			for(int i=0;i<(size_t)lastRightEye.data.size();i++){
				Rdata[i]=lastRightEye.data[i];
			}
			imageL.loadFromMemory(Ldata,(size_t)lastLeftEye.data.size());
			imageR.loadFromMemory(Rdata,(size_t)lastRightEye.data.size());
			delete Ldata;
			delete Rdata;
			if(imageL.getSize().x!=imageR.getSize().x || imageL.getSize().y!=imageR.getSize().y)
				std::cout<<"imageL:("<<imageL.getSize().x<<","<<imageL.getSize().y<<") imageR:("<<imageR.getSize().x<<","<<imageR.getSize().y<<")"<<std::endl;
			sf::Texture texture;
			int max_x=imageL.getSize().x;
			int max_y=imageL.getSize().y;
			for(int y = 0; y <max_y; y++)
			{
				for(int x = 0; x < max_x; x++)
				{
					pixels[(y*(2*max_x+200)+x)*4]	 = imageL.getPixelsPtr()[((max_y-y)*max_x+x)*4]; // R?
					pixels[(y*(2*max_x+200)+x)*4 + 1] = imageL.getPixelsPtr()[((max_y-y)*max_x+x)*4+1]; // G?
					pixels[(y*(2*max_x+200)+x)*4 + 2] = imageL.getPixelsPtr()[((max_y-y)*max_x+x)*4+2]; // B?
					pixels[(y*(2*max_x+200)+x)*4 + 3] = 255; // A?
				}
				for(int x=1024;x<1224;x++){
					pixels[(y*(2*max_x+200)+x)*4]	 = 0; // R?
					pixels[(y*(2*max_x+200)+x)*4 + 1] = 0; // G?
					pixels[(y*(2*max_x+200)+x)*4 + 2] = 0; // B?
					pixels[(y*(2*max_x+200)+x)*4 + 3] = 255; // A?
				}
				for(int x = (max_x+200); x < (2*max_x+200); x++)
				{
					pixels[(y*(2*max_x+200)+x)*4]	 = imageR.getPixelsPtr()[((max_y-y)*max_x+x-(max_x+200))*4]; // R?
					pixels[(y*(2*max_x+200)+x)*4 + 1] = imageR.getPixelsPtr()[((max_y-y)*max_x+x-(max_x+200))*4+1]; // G?
					pixels[(y*(2*max_x+200)+x)*4 + 2] = imageR.getPixelsPtr()[((max_y-y)*max_x+x-(max_x+200))*4+2]; // B?
					pixels[(y*(2*max_x+200)+x)*4 + 3] = 255; // A?
				}
			}
			window.clear();
			image.create(2248, 544, pixels);
			texture.create(2248, 544);
			texture.update(image);
			sf::Sprite sprite;
			sprite.setTexture(texture);
			window.draw(sprite);
			window.display();
			delete pixels;
		}
		if(gui.goHomeCount==12){
			goHomeMsg.body_part=0;
			goHomeMsg.robot_side=0;
			go_home_pub.publish(goHomeMsg);
			std::cout<<"just published goHome LEFT_ARM"<<std::endl;
			for(int i=0;i<6;i++){
				ros::spinOnce();
				loop_rate.sleep();
			}
			goHomeMsg.robot_side=1;
			go_home_pub.publish(goHomeMsg);
			std::cout<<"just published goHome RIGHT_ARM"<<std::endl;
			for(int i=0;i<6;i++){
				ros::spinOnce();
				loop_rate.sleep();
			}
			goHomeMsg.body_part=1;
			go_home_pub.publish(goHomeMsg);
			std::cout<<"just published goHome TORSO"<<std::endl;
			for(int i=0;i<6;i++){
				ros::spinOnce();
				loop_rate.sleep();
			}
			goHomeMsg.body_part=2;
			go_home_pub.publish(goHomeMsg);
			std::cout<<"just published goHome PELVIS"<<std::endl;
			gui.startGoingHome=false;
			gui.goHomeCount--;
			ros::spinOnce();
			loop_rate.sleep();
			continue;
			/*
			moveSpeedOverRide=3;
			arm_controller.publish(moveArm(JRIGHT,stateTracker.getArmHomeAngles()));
			arm_controller.publish(moveArm(JLEFT,stateTracker.getArmHomeAngles()));
			chest_controller.publish(moveChest(stateTracker.getChestHomeAngles()));
			neck_controller.publish(moveHead(stateTracker.getNeckHomeAngles()));
			pelvis_height_controller.publish(movePelvisHeight(stateTracker.getPelvisHeightHomeAngles()));
			moveSpeedOverRide=-1;*/
		}
		if(!more || justChanged || (gui.goHomeCount>0 && gui.goHomeCount<12)){
			//std::cout<<gui.goHomeCount<<" "<<more<<" "<<justChanged<<std::endl;
			if(justChanged && more)
				justChanged=false;
			/*
			ihmc_msgs::GoHomeRosMessage msg;
			msg.trajectory_time=3;
			msg.unique_id=5;
			go_home_pub.publish(msg);
			msg.robot_side=1;
			go_home_pub.publish(msg);*/
			if(gui.goHomeCount>0)
				gui.goHomeCount--;
			ros::spinOnce();
			loop_rate.sleep();
			continue;
		}
		std::vector<double> out;
		out.push_back((tp.tv_sec * 1000 + tp.tv_usec / 1000)-end);//oh wow, this is terrible programming practice... dang man
		if(end!=0)
			gui.record("*: ",out);

		//gui.gesture==0 acts as an unlock feature
		if(gui.isRightArmOn && gui.gesture==1)
			arm_controller.publish(moveArm(JRIGHT,stateTracker.getRightArmAngles()));
		else if(gui.isRightArmOn && gui.gesture!=1 && false){
			moveSpeedOverRide=3;
			arm_controller.publish(moveArm(JRIGHT,stateTracker.getArmHomeAngles()));
			moveSpeedOverRide=-1;
			moveArm(JRIGHT,stateTracker.getRightArmAngles(),false);
		}

		if(gui.isLeftArmOn && gui.gesture==1)
			arm_controller.publish(moveArm(JLEFT,stateTracker.getLeftArmAngles()));
		else if(gui.isLeftArmOn && gui.gesture!=1 && false){
			moveSpeedOverRide=3;
			arm_controller.publish(moveArm(JLEFT,stateTracker.getArmHomeAngles()));
			moveSpeedOverRide=-1;
			//moveArm(JLEFT,stateTracker.getLeftArmAngles(),false);
		}
		if(gui.isChestOn && gui.gesture==1)
			chest_controller.publish(moveChest(stateTracker.getChestAngles()));
		else if(gui.isChestOn && gui.gesture!=1 && false){
			moveSpeedOverRide=3;
			chest_controller.publish(moveChest(stateTracker.getChestHomeAngles()));
			moveSpeedOverRide=-1;
			//moveChest(stateTracker.getChestAngles(),false);
		}
		if(gui.isHeadOn && gui.gesture==1)
			neck_controller.publish(moveHead(stateTracker.getNeckAngles()));
		else if(gui.isHeadOn && gui.gesture!=1 && false){
			moveSpeedOverRide=3;
			neck_controller.publish(moveHead(stateTracker.getNeckHomeAngles()));
			moveSpeedOverRide=-1;
			//moveHead(stateTracker.getNeckAngles(),false);
		}
		if(gui.isPelvisOn && gui.gesture==1)
			pelvis_height_controller.publish(movePelvisHeight(stateTracker.getStandingHeight()));
		else if(gui.isPelvisOn && gui.gesture!=1 && false){
			moveSpeedOverRide=3;
			pelvis_height_controller.publish(movePelvisHeight(stateTracker.getPelvisHeightHomeAngles()));
			moveSpeedOverRide=-1;
			//movePelvisHeight(stateTracker.getStandingHeight(),false);
		}

		ros::spinOnce();
		gettimeofday(&tp, NULL);
		long int start2 = tp.tv_sec * 1000 + tp.tv_usec / 1000;
		loop_rate.sleep();
		gettimeofday(&tp, NULL);
		end = tp.tv_sec * 1000 + tp.tv_usec / 1000;
		std::cout<<end-start2<<std::endl;
	}
	go_home_pub.publish(goHomeMsg);
	goHomeMsg.robot_side=1;
	go_home_pub.publish(goHomeMsg);
	return 0;
}
/**
 * Actions to do when going to sleep
 *
 * Clears the awake flag, moves the head to its sleep pose and reports the
 * SLEEP emotion.
 */
uint8_t Controller::sleep()
{
	mWakeUp = false;
	moveHead(HEAD_SLEEP_X, HEAD_SLEEP_Z);
	return nero_msgs::Emotion::SLEEP;
}
// Convenience overload: forwards to the two-argument moveHead with the second argument set to true.
ihmc_msgs::NeckTrajectoryRosMessage moveHead(std::vector<double> angles){return moveHead(angles,true);}
Exemple #14
0
// Eye-calibration tool: initializes the frame grabber and head, then either
// replays joint positions from a file or asks the user interactively; at
// each position it runs calibrate() and optionally logs velocity, joint
// angles and the measured orientation to a results file.
int main(int argc, char* argv[])
{
	
	int boardN = 0;
	int ret = 0;
	int rhoSize = 256;
	int thetaSize = 180;
	
	char c;

	printf("\n[eyeCalib] Setting up the sensor..");
	ret = _grabber.initialize(boardN, _sizeX, _sizeY);
	if (ret == YARP_FAIL)
	{
		printf("[eyeCalib] ERROR in _grabber.initialize(). Quitting.\n");
		exit (1);
	}

	initializeHead();

	printf("\n[eyeCalib] Allocating images and filter (%d x %d)...", _sizeX, _sizeY);
	for (int i=0; i<N_IMAGES; i++)
		_imgBuffer[i].Resize (_sizeX, _sizeY);
	_susanImg.Resize (_sizeX, _sizeY);
	_susanFlt.resize(_sizeX, _sizeY);
	printf("\n[eyeCalib] Allocating Hough Filter (theta %d, rho %d)..", thetaSize, rhoSize);
	_houghFlt.resize(thetaSize, rhoSize);
	char fileName[255];
	FILE *outFile = NULL;
	double orientation;
	double posVector[4];
	double vel;
	printf("\n[eyeCalib] Do you want to save results to file ? [Y/n] ");
	c = getch();
	if (c != 'n')
	{
		
		printf("\n[eyeCalib] File name ? ");
		// BUGFIX: bound the read to the buffer size to avoid overflowing fileName
		scanf("%254s", fileName);
		printf("[eyeCalib] Append data (Y/n)?");
		c = getch();
		if (c == 'n')
		{
			outFile = fopen(fileName,"w");
			fprintf(outFile, "velocity;J0;J1;J2;J3;orientation\n");
		}
		else
		{
			outFile = fopen(fileName,"a");
		}

		if (!outFile)
		{
			printf("[eyeCalib] ERROR opening the file %s. Saving aboorted.\n", fileName);
		}
	}
	printf("\n[eyeCalib] Interactive calibration or Load data form file? [I/l] ");
	c = getch();
	if ( c == 'l')
	{
		printf("\n[eyeCalib] File to load? ");
		// BUGFIX: bounded read (see above)
		scanf("%254s", fileName);
		FILE *dataFile = NULL;
		dataFile = fopen(fileName,"r");
		if (dataFile == NULL)
		{
			printf("[eyeCalib] ERROR opening the file %s. Quitting.\n", fileName);
			releaseSensor();
			if (outFile != NULL)
				fclose(outFile);
			exit(1);
		}
		// Replay each (velocity, 4-joint position) line from the data file.
		while (readLine(dataFile, &vel, posVector) == true)
		{
			printf("\n[eyeCalib] Moving [%.2lf %.2lf %.2lf %.2lf * %.2lf]..", posVector[0], posVector[1], posVector[2], posVector[3], vel);
			printf("\n[eyeCalib] Hit any key when Position has been reached..");
			moveHead(vel,posVector);
			c = getch();
			orientation = calibrate();
			// BUGFIX: the format string had three conversion specifiers but only
			// two arguments (undefined behavior) — the unused "variance" field
			// was dropped.
			printf("\n[eyeCalib] Main orientation is %.3frad (%.3fdeg)", orientation, _rad2deg(orientation));
			if (outFile != NULL)
				fprintf(outFile, "%lf;%lf;%lf;%lf;%lf;%lf\n", vel, posVector[0], posVector[1], posVector[2], posVector[3], orientation);
		}
		fclose(dataFile);
	}
	else
	{
		// Interactive mode: prompt for joint targets and velocity each round.
		do
		{
			printf("\n[eyeCalib] Avaible Joints:\n\t0 - Left Pan\n\t1 - Left Tilt\n\t2 - Right Pan\n\t3 - Left Tilt");
			printf("\n[EyeCalib] Move to? [J0;J1;J2;J3] ");
			scanf("%lf;%lf;%lf;%lf", &(posVector[0]), &(posVector[1]), &(posVector[2]), &(posVector[3]));
			printf("\n[eyeCalib] Move with velocity? ");
			scanf("%lf", &vel);
			printf("\n[eyeCalib] Moving [%.2lf %.2lf %.2lf %.2lf * %.2lf]..", posVector[0], posVector[1], posVector[2], posVector[3], vel);
			printf("\n[eyeCalib] Hit any key when Position has been reached..");
			moveHead(vel, posVector);
			c = getch();
			orientation = calibrate();
			// BUGFIX: same specifier/argument mismatch as above.
			printf("\n[eyeCalib] Main orientation is %.3frad (%.3fdeg)", orientation, _rad2deg(orientation));
			if (outFile != NULL)
				fprintf(outFile, "%lf;%lf;%lf;%lf;%lf;%lf\n", vel, posVector[0], posVector[1], posVector[2], posVector[3], orientation);
			printf("\n[eyeCalib] Another loop ? [Y/n] ");
			c = getch();
		} while (c != 'n');
	}

	if (outFile != NULL)
		fclose(outFile);
	releaseSensor();
	releaseHead();

	printf("\n[eyeCalib] Bye.\n");

	return 0;
}
Exemple #15
0
/**************************************
 * Definition: Strafes the robot until it is considered centered
 *             between two squares in a corridor
 **************************************/
void Robot::center() {
    moveHead(RI_HEAD_MIDDLE);
	
    int turnAttempts = 0;

    Camera::prevTagState = -1;
    while (true) {
        bool turn = false;

        // Ask the camera how far off-center we are, and whether it wants a
        // turn (rather than a strafe) to fix it.
        float centerError = _camera->centerError(COLOR_PINK, &turn);
        if (turn) {
            if (_centerTurn(centerError)) {
                break;
            }
            turnAttempts++;
        }
        else {
            if (_centerStrafe(centerError)) {
                break;
            }
        }
        
        // Too many turn corrections: re-check our heading before continuing.
        if (turnAttempts > 2) {
            moveHead(RI_HEAD_DOWN);

            // make sure we are close to our desired heading, in case we didn't move correctly
            updatePose(false);

            // BUGFIX: initialize so thetaHeading is never read uninitialized
            // should _heading hold a value outside the four cases below.
            float thetaHeading = DEGREE_0;
            switch (_heading) {
            case DIR_NORTH:
                thetaHeading = DEGREE_90;
                break;
            case DIR_SOUTH:
                thetaHeading = DEGREE_270;
                break; 
            case DIR_EAST:
                thetaHeading = DEGREE_0;
                break;
            case DIR_WEST:
                thetaHeading = DEGREE_180;
                break;
            default:
                break;
            }

            float theta = _pose->getTheta();
            float thetaError = thetaHeading - theta;
            thetaError = Util::normalizeThetaError(thetaError);
            if (fabs(thetaError) > DEGREE_45) {
                // if we turned beyond 45 degrees from our
                // heading, this was a mistake. let's turn
                // back just enough so we're 45 degrees from
                // our heading.
                float thetaGoal = Util::normalizeTheta(theta + (DEGREE_45 + thetaError));
                turnTo(thetaGoal, MAX_THETA_ERROR);
            }

            turnAttempts = 0;
            moveHead(RI_HEAD_MIDDLE);
        }
    }

    moveHead(RI_HEAD_DOWN);

    // Reset the PID accumulators for the next centering run.
    _centerTurnPID->flushPID();
    _centerStrafePID->flushPID();
}
/**
 * Actions to do when waking up
 *
 * Sets the awake flag, moves the head to its initial pose and reports the
 * NEUTRAL emotion.
 */
uint8_t Controller::wakeUp()
{
	mWakeUp = true;
	moveHead(HEAD_INIT_X, HEAD_INIT_Z);
	return nero_msgs::Emotion::NEUTRAL;
}
Exemple #17
0
// Constructs the robot: connects the interface, sets up camera and position
// sensors, builds the kalman-filtered global pose and PID controllers, then
// localizes via NorthStar to pick the starting cell and loads the map.
Robot::Robot(std::string address, int id) {
    // store the robot's name as an int to be used later
    _name = Util::nameFrom(address);
    
    // initialize movement variables
    _numCellsTraveled = 0;
    _turnDirection = 0;
    _movingForward = true;
    _speed = 0;
    _heading = DIR_NORTH;

    setFailLimit(MAX_UPDATE_FAILS);

    _robotInterface = new RobotInterface(address, id);

    printf("robot interface loaded\n");

    // initialize camera
    _camera = new Camera(_robotInterface);

    // initialize position sensors
    _wheelEncoders = new WheelEncoders(this);
    _northStar = new NorthStar(this);
    
    // initialize global pose
    _pose = new Pose(0.0, 0.0, 0.0);
    // bind _pose to the kalman filter
    _kalmanFilter = new KalmanFilter(_pose);
    // process noise first, then per-sensor (NorthStar / wheel-encoder) noise
    _kalmanFilter->setUncertainty(PROC_X_UNCERTAIN,
                                  PROC_Y_UNCERTAIN,
                                  PROC_THETA_UNCERTAIN,
                                  NS_X_UNCERTAIN,
                                  NS_Y_UNCERTAIN,
                                  NS_THETA_UNCERTAIN,
                                  WE_X_UNCERTAIN,
                                  WE_Y_UNCERTAIN,
                                  WE_THETA_UNCERTAIN);

    printf("kalman filter initialized\n");

    PIDConstants movePIDConstants = {PID_MOVE_KP, PID_MOVE_KI, PID_MOVE_KD};
    PIDConstants turnPIDConstants = {PID_TURN_KP, PID_TURN_KI, PID_TURN_KD};

    _movePID = new PID(&movePIDConstants, MIN_MOVE_ERROR, MAX_MOVE_ERROR);
    _turnPID = new PID(&turnPIDConstants, MIN_TURN_ERROR, MAX_TURN_ERROR);

    printf("pid controllers initialized\n");
    
    // Put robot head down for NorthStar use
    moveHead(RI_HEAD_DOWN);

    printf("collecting initial data\n");
    // fill our sensors with data
    prefillData();
    // base the wheel encoder pose off north star to start,
    // since we might start anywhere in the global system)
    _wheelEncoders->resetPose(_northStar->getPose());
    // now update our pose again so our global pose isn't
    // some funky value (since it's based off we and ns)
    updatePose(true);

    // load the map once we've found our position in the global
    // system, so we can know what cell we started at
    // NOTE(review): the x>150 threshold appears to split the arena into two
    // known start cells — confirm against the map's coordinate system.
    int startingX;
    int startingY;
    if (_pose->getX() > 150) {
        startingX = 0;
        startingY = 2;
    }
    else {
        startingX = 6;
        startingY = 2;
    }
    
    _map = new Map(_robotInterface, startingX, startingY);
    _mapStrategy = new MapStrategy(_map);

    printf("map loaded\n");
}
Exemple #18
0
    qthread->start(QThread::HighestPriority);


    this->portEnum = new QextSerialEnumerator(this);
    this->portEnum->setUpNotifications();
    QList<QextPortInfo> ports = this->portEnum->getPorts();
    //finding avalible ports
    foreach (QextPortInfo info, ports) {
        ui->portCombo->addItem(info.portName);
    }
    //connecting travel moves checkbox
    connect(ui->showTravelChkBox, SIGNAL(toggled(bool)),ui->glWidget, SLOT(showTravel(bool)));
    //disable steppers btn
    connect(ui->disableStpBtn, SIGNAL(clicked()), printerObj, SLOT(disableSteppers()), Qt::QueuedConnection);
    //connect head move widget
    connect(ui->headControlWidget, SIGNAL(clicked(QPoint)), this, SLOT(moveHead(QPoint)));
    connect(ui->headControlWidget, SIGNAL(hovered(QPoint)), this, SLOT(updateHeadGoToXY(QPoint)));

    ui->printBtn->setDisabled(true);
    ui->pauseBtn->setDisabled(true);
    ui->axisControlGroup->setDisabled(true);
    ui->headControlWidget->hidePoints(true);
    ui->temperatureGroupBox->setDisabled(true);
    this->calibrateDialog->setDisabled(true);
    ui->macroGroup->setDisabled(true);
    ui->progressBar->hide();
    ui->baudCombo->addItem("1200", BAUD1200);
    ui->baudCombo->addItem("2400", BAUD2400);
    ui->baudCombo->addItem("4800", BAUD4800);
    ui->baudCombo->addItem("9600", BAUD9600);
    ui->baudCombo->addItem("19200", BAUD19200);
// Full fetch-an-object script: drive to the table (if pathing is enabled),
// scan for the requested object with the head-mounted camera, line the base
// up with it, grab it with the arm/gripper, retreat, and return to the
// original position. Returns HAPPY on success, SAD on any failure.
uint8_t Controller::get(int object)
{
	mBusy = true;
	float min_y = 0.f;

	// Tuck the arm in before driving.
	mLock = LOCK_ARM;
	moveArm(MIN_ARM_X_VALUE, MIN_ARM_Z_VALUE);
	waitForLock();

	if (mPathingEnabled)
	{
		//Go to the table
		updateRobotPosition();

		moveHead(0.0, 0.0);
		mLock = LOCK_PATH;
		moveBase(mGoal);
		waitForLock();

		ROS_INFO("Reached Goal");
	}

	//Aim Kinect to the table
	setFocusFace(false);
	mLock = LOCK_HEAD;
	moveHead(VIEW_OBJECTS_ANGLE, 0.0);
	waitForLock();

	//Start object recognition process
	geometry_msgs::PoseStamped objectPose;
	double drive_time = 0.0;

	uint8_t attempts = 0;
	for (attempts = 0; attempts < MAX_GRAB_ATTEMPTS; attempts++)
	{
		// start depth for object grabbing (takes a second to start up, so we put it here)
		setDepthForced(true);
	
		// Sweep the head: straight ahead, then left, then right.
		uint8_t i = 0;
		for (i = 0; i < 3; i++)
		{
			mLock = LOCK_HEAD;
			switch (i)
			{
			case 0: moveHead(VIEW_OBJECTS_ANGLE, 0.0); break;
			case 1: moveHead(VIEW_OBJECTS_ANGLE, MIN_VIEW_ANGLE); break;
			case 2: moveHead(VIEW_OBJECTS_ANGLE, MAX_VIEW_ANGLE); break;
			}
			waitForLock();

			// y == 0.0 is treated as an invalid detection.
			if (findObject(object, objectPose, min_y) && objectPose.pose.position.y != 0.0)
			{
				// object found with turned head? rotate base
				if (i != 0)
				{
					mLock = LOCK_BASE;
					moveHead(VIEW_OBJECTS_ANGLE, 0.0);
					switch (i)
					{
					case 1: rotateBase(MIN_VIEW_ANGLE); break;
					case 2: rotateBase(MAX_VIEW_ANGLE); break;
					}
					waitForLock();

					// Re-detect after rotating; give up if the object is lost.
					if (findObject(object, objectPose, min_y) == false || objectPose.pose.position.y == 0.0)
					{
						ROS_ERROR("Failed to find object, quitting script.");
						if (mPathingEnabled)
							returnToOriginalPosition();
							
						setDepthForced(false);
						return nero_msgs::Emotion::SAD;
					}
				}
				break;
			}
		}

		// found an object, now rotate base to face the object directly and
		// create enough space between the robot and the table for the arm to move up
		double yaw = -atan(objectPose.pose.position.x / objectPose.pose.position.y);
		while (fabs(yaw) > TARGET_YAW_THRESHOLD || fabs(TABLE_DISTANCE - min_y) > TABLE_DISTANCE_THRESHOLD)
		{
			// Fix distance first, then orientation, re-detecting after each move.
			if (fabs(TABLE_DISTANCE - min_y) > TABLE_DISTANCE_THRESHOLD)
			{
				ROS_INFO("Moving by %lf. Distance: %lf", min_y - TABLE_DISTANCE, min_y);

				mLock = LOCK_BASE;
				positionBase(min_y - TABLE_DISTANCE);
				waitForLock();
			}
			else
			{
				ROS_INFO("Rotating by %lf.", yaw);

				mLock = LOCK_BASE;
				rotateBase(yaw);
				waitForLock();
			}

			if (findObject(object, objectPose, min_y) == false || objectPose.pose.position.y == 0.0)
			{
				ROS_ERROR("Failed to find object, quitting script.");
				if (mPathingEnabled)
					returnToOriginalPosition();
				return nero_msgs::Emotion::SAD;
			}

			yaw = -atan(objectPose.pose.position.x / objectPose.pose.position.y);
		}
		
		setDepthForced(false); // we are done with object finding

		ROS_INFO("yaw: %lf", yaw);
		ROS_INFO("distance: %lf", fabs(TABLE_DISTANCE - min_y));

		//Move arm to object and grab it
		objectPose.pose.position.x = 0.0; // we are straight ahead of the object, so this should be 0.0 (it is most likely already close to 0.0)
		mLock = LOCK_ARM;
		moveArm(objectPose);
		waitForLock();

		//Be ready to grab object
		setGripper(true);
		usleep(1000000);
		setGripper(false);

		//Move forward to grab the object
		double expected_drive_time = (objectPose.pose.position.y - ARM_LENGTH) / GRAB_TARGET_SPEED;
		drive_time = positionBaseSpeed(expected_drive_time + EXTRA_GRAB_TIME, GRAB_TARGET_SPEED, true);
		// Driving the full budget without stopping means we missed the object.
		if (drive_time >= expected_drive_time + EXTRA_GRAB_TIME)
		{
			ROS_ERROR("Been driving forward too long!");

			// open gripper again
			setGripper(true);

			// Back off the same distance we drove in and retry.
			expressEmotion(nero_msgs::Emotion::SAD);
			positionBaseSpeed(drive_time, -GRAB_TARGET_SPEED);

			//Move object to body
			mLock = LOCK_ARM;
			moveArm(MIN_ARM_X_VALUE, MIN_ARM_Z_VALUE);
			waitForLock();

			expressEmotion(nero_msgs::Emotion::NEUTRAL);
		}
		else
			break;
	}

	// no more attempts left, we are sad
	if (attempts >= MAX_GRAB_ATTEMPTS)
	{
		ROS_ERROR("No more grab attempts left.");
		if (mPathingEnabled)
			returnToOriginalPosition();
		return nero_msgs::Emotion::SAD;
	}

	// little hack to allow the gripper to close
	usleep(500000);

	//Lift object
	mLock = LOCK_ARM;
	moveArm(objectPose.pose.position.x, objectPose.pose.position.z + LIFT_OBJECT_DISTANCE);
	waitForLock();

	//Move away from table
	positionBaseSpeed(drive_time, -GRAB_TARGET_SPEED);

	//Move object to body
	mLock = LOCK_ARM;
	moveArm(MIN_ARM_X_VALUE, MIN_ARM_Z_VALUE);
	waitForLock();

	if (mPathingEnabled)
		returnToOriginalPosition();

	moveHead(FOCUS_FACE_ANGLE, 0.0);

	//Deliver the juice
	//mLock = LOCK_ARM;
	//moveArm(DELIVER_ARM_X_VALUE, DELIVER_ARM_Z_VALUE);
	//waitForLock();

	mBusy = false;
	return nero_msgs::Emotion::HAPPY;
}
/*
 * The main method of this node, which receives a disparity image and computes the selected ROI of the
 * object.
 *
 * Pipeline:
 *   1. Convert the ROS disparity image to an OpenCV 32-bit float Mat.
 *   2. Convert disparities to per-pixel distances (z = f * T / d).
 *   3. Mark out-of-range disparities as "very far" and zero out points beyond
 *      the distance filter threshold.
 *   4. Either search for a new object (findRoi) or track the current one
 *      with CAMShift, then optionally command the head toward the object.
 */
void StereoSelector::disparityImageCallback(const stereo_msgs::DisparityImage::ConstPtr& disp_image_ptr)
{


	//Get last value of the head movement flag; read every callback so it can
	//be toggled externally at runtime via the parameter server
	private_nh_.getParam("/qbo_stereo_selector/move_head", move_head_bool_);


	sensor_msgs::Image image = disp_image_ptr->image;
	cv_bridge::CvImagePtr cv_ptr;

    try
    {
    	// Disparity data is single-channel 32-bit float
    	cv_ptr = cv_bridge::toCvCopy(image, sensor_msgs::image_encodings::TYPE_32FC1);
    }
    catch (cv_bridge::Exception& e)
    {
		ROS_ERROR("Error receiving the disparity image. cv_bridge exception: %s", e.what());
		return;
    }

    cv::Mat disparity_image = cv_ptr->image;

    /*
     * Initializing image in which each pixel correspond to the distance of that point
     */
    cv::Mat point_distances = cv::Mat::ones(disparity_image.size(), CV_32FC1);

    //Calculate distances for each point: z = f * T / disparity
    //(f = focal length, T = stereo baseline; the division is element-wise)
    point_distances = point_distances*disp_image_ptr->f * disp_image_ptr->T;
    point_distances = point_distances/disparity_image;

    for(int i = 0; i<disparity_image.rows;i++)
    {
    	for(int j = 0; j<disparity_image.cols;j++)
    	{
    		if(disparity_image.at<float>(i,j)<=disp_image_ptr->min_disparity || disparity_image.at<float>(i,j)>disp_image_ptr->max_disparity)
    		{
    			//If point is irregular, consider it to be very far away
    			//(10x the filter threshold guarantees it is zeroed out by the
    			//distance filter below)
    			point_distances.at<float>(i,j) = distance_filter_threshold_*10;

    		}
    	}
    }


    /*
     * Calculating max distance point
     */
    // NOTE(review): maxVal will typically be the far-away sentinel written
    // above (threshold * 10), so the normalization below scales relative to
    // that sentinel rather than the true scene maximum — presumably intended,
    // but verify.
    double minVal, maxVal;
    cv::Point minPt, maxPt;
    cv::minMaxLoc(point_distances, &minVal, &maxVal, &minPt, &maxPt);


    /*
     * Normalized point distance image where values are from 0-255, in which closer points
     * have higher values and further points have lower values
     */

    cv::Mat point_distances_normalized = 255-point_distances*255/maxVal;

    //Applying filter using the distance filter: anything past the threshold
    //(or with a non-positive distance) is suppressed to 0 (background)
	for(int i = 0; i<point_distances.rows;i++)
	   for(int j = 0; j<point_distances.cols;j++)
			if(point_distances.at<float>(i,j)<=0 || point_distances.at<float>(i,j)>distance_filter_threshold_)
				point_distances_normalized.at<float>(i,j) = 0;

	/*
	 * Distance image representation with 1byte per pixel
	 */
    cv::Mat distance_image_8bit;
    point_distances_normalized.convertTo(distance_image_8bit, CV_8UC1);



    //Computing selection: full-image ROI search while no object is locked,
    //CAMShift tracking once one is
    if(object_detected_bool_ == false)
    {
    	ROS_INFO("Searching for object using FIND ROI. Best object mean till now: %lg. Distance threshold: %lg",object_roi_mean_, distance_filter_threshold_);
    	object_detected_bool_ = findRoi(point_distances_normalized);

    }
    else
    {
    	ROS_INFO("Selecting object using CAM Shift");
    	detectCamShift(distance_image_8bit);
    }


    //If object found, calculate closest point within the ROI and the object's
    //position relative to the image center (used for head tracking)
    if(object_detected_bool_)
    {
    	// NOTE(review): shallow header copy — object_mask_ shares the pixel
    	// buffer with distance_image_8bit; confirm consumers do not need a
    	// deep copy.
    	object_mask_ = distance_image_8bit;

    	cv::minMaxLoc(point_distances(object_roi_), &minVal, &maxVal, &minPt, &maxPt);
    	object_distance_ = minVal;

    	// Pixel offset of the ROI center from the image center
    	object_pos_ = cv::Point(object_roi_.x+object_roi_.width/2 - image_size_.width/2,
    			object_roi_.y+object_roi_.height/2-image_size_.height/2);

    	ROS_INFO("OBJECT FOUND -> Position (%d, %d), Distance: %lg", object_pos_.x, object_pos_.y, object_distance_);

    	//Update undetected_count counter
    	undetected_count_ = 0;
    }
    else
    	undetected_count_++;

    if(move_head_bool_)
    	moveHead();

//    cv::imshow("Stereo Selector Image",distance_image_8bit);
//    cv::waitKey(10);
}
Exemple #21
0
/**************************************
 * Definition: Moves the robot in the specified direction
 *             the specified number of cells (65x65 area)
 *
 *             For each cell: realign heading, center between walls,
 *             count pink landmark squares down the hall, drive one
 *             cell in global coordinates, then re-count the squares to
 *             detect (and back out of) an overshoot.
 *
 * Parameters: ints specifying direction and number of cells
 **************************************/
void Robot::move(int direction, int numCells) {
    _heading = direction;

    int cellsTraveled = 0;
    while (cellsTraveled < numCells) {
        // Skip the alignment phase for the very first cell of the run.
        // NOTE(review): _numCellsTraveled is a member and is never updated in
        // this function — presumably maintained by turn()/moveTo(); verify.
        if(_numCellsTraveled != 0) {
            // make sure we're facing the right direction first
            // (only if north star's theta reading can be trusted)
            if (nsThetaReliable()) {
                turn(direction);
            }
            // center ourselves between the walls
            center();
            // turn once more to fix any odd angling 
            if (nsThetaReliable()) {
                turn(direction);
            }
        }

        // count how many squares we see down the hall
        // (head raised to mid position so the camera can see ahead)
        moveHead(RI_HEAD_MIDDLE);
        float leftSquareCount = _camera->avgSquareCount(COLOR_PINK, IMAGE_LEFT);
        float rightSquareCount = _camera->avgSquareCount(COLOR_PINK, IMAGE_RIGHT);
        moveHead(RI_HEAD_DOWN);
        // cap the counts at 3 — presumably counts beyond 3 are unreliable
        // and 3 is "plenty of hallway left"; confirm against camera code
        if (leftSquareCount > 3.0) {
            leftSquareCount = 3.0;
        }
        if (rightSquareCount > 3.0) {
            rightSquareCount = 3.0;
        }

        // update our pose estimates now, ignoring
        // wheel encoders and setting them to be north star's
        updatePose(false);
        _wheelEncoders->resetPose(_northStar->getPose());
        // finally, update our pose one more time so kalman isn't wonky
        updatePose(true);

        // based on the direction, move in the global coord system
        float goalX = _pose->getX();
        float goalY = _pose->getY();
        float distErrorLimit = MAX_DIST_ERROR;
        switch (direction) {
        case DIR_NORTH:
            goalY += CELL_SIZE;
            break;
        case DIR_SOUTH:
            goalY -= CELL_SIZE;
            break;
        case DIR_EAST:
            goalX += CELL_SIZE;
            // NOTE(review): shortened travel in map columns 0/1 — looks like
            // a hallway-geometry fudge factor; confirm against the map layout
            if (_map->getCurrentCell()->x == 0 ||
                _map->getCurrentCell()->x == 1) {
                goalX -= 30.0;
            }
            break;
        case DIR_WEST:
            goalX -= CELL_SIZE;
            if (_map->getCurrentCell()->x == 0 ||
                _map->getCurrentCell()->x == 1) {
                goalX += 30.0;
            }
            break;
        }

        moveTo(goalX, goalY, distErrorLimit);
        // make sure we stop once we're there
        stop();

        // re-count the landmark squares: losing more than one square on
        // either side suggests we overshot or ended up facing a wall
        moveHead(RI_HEAD_MIDDLE);
        float newLeftSquareCount = _camera->avgSquareCount(COLOR_PINK, IMAGE_LEFT);
        float newRightSquareCount = _camera->avgSquareCount(COLOR_PINK, IMAGE_RIGHT);
        if (newLeftSquareCount > 3.0) {
            newLeftSquareCount = 3.0;
        }
        if (newRightSquareCount > 3.0) {
            newRightSquareCount = 3.0;
        }
        // back up in small steps (at most ~5 strikes) until the square
        // counts recover
        int i = 0;
        while ((newLeftSquareCount + 1.0 < leftSquareCount ||
                newRightSquareCount + 1.0 < rightSquareCount) &&
                i < 5) {
            moveBackward(10);
            _robotInterface->reset_state();
            // NOTE(review): unlike above, these fresh counts are not capped
            // at 3.0 — harmless for the loop exit (larger counts still end
            // the loop), but inconsistent; verify intent
            newLeftSquareCount = _camera->avgSquareCount(COLOR_PINK, IMAGE_LEFT);
            newRightSquareCount = _camera->avgSquareCount(COLOR_PINK, IMAGE_RIGHT);
            // NOTE(review): when BOTH sides are still short, i is bumped
            // twice (here and below), halving the retry budget — possibly
            // deliberate "give up faster" logic, possibly a bug; confirm
            if (newLeftSquareCount + 1.0 < leftSquareCount &&
                newRightSquareCount + 1.0 < rightSquareCount) {
                i++;
            }     
            i++;
        }
        moveHead(RI_HEAD_DOWN);

        // we made it!
        cellsTraveled++;
        printf("Made it to cell %d\n", cellsTraveled);
    }
}