/* GLUT mouse-button callback: a left press starts a pan drag, a right press
 * starts a rotate drag, and releasing any button ends the drag.  The press
 * position is latched into (ox, oy) and forwarded to motion() so the first
 * drag delta is zero. */
void mouse(int button, int state, int x, int y)
{
    if (state == GLUT_UP) {
        mot = 0;                    /* drag finished */
        return;
    }
    if (state != GLUT_DOWN)
        return;

    if (button == GLUT_LEFT_BUTTON) {
        mot = PAN;
        ox = x;
        oy = y;
        motion(x, y);
    } else if (button == GLUT_RIGHT_BUTTON) {
        mot = ROT;
        ox = x;
        oy = y;
        motion(x, y);
    }
    /* middle-button press: intentionally ignored */
}
// Mouse-move handler: routes the event to motion() while a drag is active,
// otherwise to passive(), passing the shift/control modifier states.
// Fix: the default-handler call CView::OnMouseMove(nFlags, point) had been
// absorbed into the preceding "// TODO" comment when the source was collapsed
// onto one line, so the base class never saw the message; it is restored here
// as real code (the standard MFC wizard layout).
void CH3View::OnMouseMove(UINT nFlags, CPoint point)
{
	// TODO: Add your message handler code here and/or call default
	CView::OnMouseMove(nFlags, point);
	if (drag)
		motion(point.x, point.y, nFlags & MK_SHIFT, nFlags & MK_CONTROL);
	else
		passive(point.x, point.y, nFlags & MK_SHIFT, nFlags & MK_CONTROL);
}
/* GLUT mouse handler: each button press selects an interaction mode and
 * seeds the drag by forwarding the press position to motion(). */
void mouse(int button, int state, int x, int y)
{
    if (state != GLUT_DOWN)
        return;

    if (button == GLUT_LEFT_BUTTON)
        active = OBJ_ANGLE;        /* rotate */
    else if (button == GLUT_MIDDLE_BUTTON)
        active = OBJ_PICK;         /* pick */
    else if (button == GLUT_RIGHT_BUTTON)
        active = OBJ_TRANSLATE;    /* translate */
    else
        return;

    motion(x, y);
}
// Returns the Object for `id`, creating and caching it on first request.
// Precondition: `id` must already be registered in mObjectIDs (asserted).
Object* ObjectFactory::object(const UUID& id) {
    assert( mObjectIDs.find(id) != mObjectIDs.end() );
    ObjectMap::iterator it = mObjects.find(id);
    if (it != mObjects.end())
        return it->second;  // cache hit: reuse the existing instance
    // Cache miss: assemble the object from the per-id lookups and memoize it.
    Object* new_obj = new Object(this, id, motion(id), bounds(id), registerQuery(id), queryAngle(id), mContext);
    mObjects[id] = new_obj;
    return new_obj;
}
/* Finish the current drag: apply the final mouse position, commit the
 * in-progress quaternion (tq) into the current orientation (cq), and clear
 * the drag flag. */
void Trackball::stop(int x, int y)
{
    motion(x, y);
    for (int i = 0; i < 4; ++i)
        cq[i] = tq[i];
    dragged = false;
}
// Renders the hero for this frame: advances movement first, then draws the
// fire effect, every active bullet, and finally the hero sprite itself.
void Hero::render( SDL_Renderer *&renderer ) {
    motion();  // update position/state before drawing
    fire.render( renderer );
    for( Uint8 i = 0; i < how_many; i++ )
        bullet[ i ].render( renderer );
    SDL_RenderCopy( renderer, texture, NULL, &rect );
}
/* GLUT mouse handler with a two-button-mouse workaround: shift+left is
 * treated as the middle button.  A press chooses which light axes the
 * subsequent drag moves and seeds the drag with the press position. */
void mouse(int button, int state, int x, int y)
{
    /* hack for 2 button mouse */
    const bool shift_held = (glutGetModifiers() & GLUT_ACTIVE_SHIFT) != 0;
    if (button == GLUT_LEFT_BUTTON && shift_held)
        button = GLUT_MIDDLE_BUTTON;

    if (state != GLUT_DOWN)
        return;

    if (button == GLUT_LEFT_BUTTON) {
        active = MOVE_LIGHT_XY;    /* drag moves the light in X/Y */
        motion(x, y);
    } else if (button == GLUT_MIDDLE_BUTTON) {
        active = MOVE_LIGHT_Z;     /* drag moves the light along Z */
        motion(x, y);
    }
}
// Advances the guide along its scripted waypoint path while `perpetual` mode
// is enabled, looping back to the start when the last waypoint is reached.
// NOTE(review): `lasting` is accepted but never used -- confirm intent.
void Guide::changePermanent(float lasting) {
    if (perpetual) {
        // path[pt][3] appears to gate whether this waypoint involves
        // movement -- TODO confirm the meaning of column 3.
        if (path[pt][3]) {
            motion();  // step toward the next waypoint
            // Advance once the floored position matches the next waypoint's
            // coordinates exactly (exact float compare; works because both
            // sides are whole numbers).
            if ((floor(pos.x) == path[pt+1][0]) && (floor(pos.y) == path[pt+1][1]))
                pt++;
        }
        if (pt == pts)
            pt = 0;  // wrap: the path repeats forever
    }
}
// Runs the color-shirt scavenger-hunt task: spawns the motion and vision
// worker threads, blocks until both finish, then reports the captured image
// path and success.
// NOTE(review): `timeout` is accepted but never used -- confirm intent.
void ScavTaskColorShirt::executeTask(int timeout, TaskResult &result, std::string &record) {
    boost::thread motion( &ScavTaskColorShirt::motionThread, this);
    boost::thread vision( &ScavTaskColorShirt::visionThread, this);
    motion.join();
    vision.join();
    record = path_to_image;
    result = SUCCEEDED;
}
// GLUT display callback for the "Bubble Zap" game: draws the score, the
// color-cycling title, the paddle polygon and the bubbles, checks the lose
// condition, then advances the animation and requests the next frame.
void mydisplay() {
    char str[15];
    int l;
    cnt++;  // frame counter; drives the title color cycle below
    //float e=0,f=0.3,g=1;
    float e=0,f=0,g=1;  // title RGB
    sprintf(str, "%d", score);
    glClearColor(1.0f,0.5f,0.0,0.0f);
    glClear(GL_COLOR_BUFFER_BIT);
    outputs(330,15,0.5,0,0.5,"SCORE: ");
    outputs(420,15,0.5,0,0.5,str);
    // Cycle the title color blue -> green -> red, one step per frame.
    if(cnt%3==0) { e=0;f=0;g=1; }
    else if(cnt%3==1) { e=0;f=1;g=0; }
    else { e=1;f=0;g=0; }
    outputs(160,90,e,f,g,"BUBBLE ZAP ");
    // Lose when any bubble's lower edge falls below the bottom of the window.
    for(l=0;l<5;l++) {
        // if(((cir[l].y+cir[l].rr)<b3)&&((cir[l].x+cir[l].rr)>a2)&&((((cir[l].x-cir[l].rr)>a2)&&((cir[l].x-cir[l].rr)<a4))||(((cir[l].x+cir[l].rr)>a2)||((cir[l].x+cir[l].rr)<a4))))
        if((cir[l].y-cir[l].rr)<0) { lose=1; }
    }
    if(lose==1) outputs(170,240,0.5,0,0.5,"GAME OVER");
    int i;
    // Paddle quad spanning (a1,b1)..(a4,b4), drawn in blue.
    glBegin(GL_POLYGON);
    glColor3f(0.0f,0.0f,1.0f);
    glVertex2i(a1,b1);
    glVertex2i(a2,b2);
    glVertex2i(a3,b3);
    glVertex2i(a4,b4);
    glEnd();
    glFlush();
    // Draw each bubble with its own radius and color.
    for(i=0;i<5;i++) { circle(cir[i].x,cir[i].y,cir[i].rr,cir[i].red,cir[i].g,cir[i].b); }
    motion();  // advance bubble positions for the next frame
    glutPostRedisplay();  // continuous animation
}
/* GLUT mouse-press handler: a left-button press switches to object-angle
 * mode and seeds the drag with the press position; other buttons and
 * releases are ignored.
 * Fix: removed a duplicated, unreachable `break;` at the end of the case. */
void mouse(int button, int state, int x, int y)
{
    if (state == GLUT_DOWN)
        switch (button) {
        case GLUT_LEFT_BUTTON:
            active = OBJ_ANGLE;
            motion(x, y);
            break;
        }
}
/* Switch the plotter back to text mode: flush pending output, park the pen
 * off-screen, emit the text-mode escape sequence, then restore the saved
 * pen position (_supx, _supy). */
textoff() {
    sync();
    /*
     * The following is needed because going into text mode
     * leaves the pen where the cursor last was.
     */
    _penx = -40;
    _peny = 40;
    escseq(ESCP);
    outchar('a');  /* 'a' after ESCP selects text mode -- TODO confirm against device docs */
    motion(_supx, _supy);
    _penx = _supx;
    _peny = _supy;
}
void mouse(int button, int state, int x, int y) { /* hack for 2 button mouse */ if (button == GLUT_LEFT_BUTTON && glutGetModifiers() & GLUT_ACTIVE_SHIFT) button = GLUT_MIDDLE_BUTTON; if(state == GLUT_DOWN) switch(button) { case GLUT_LEFT_BUTTON: /* move the light */ active = OBJ_ANGLE; motion(x, y); break; case GLUT_MIDDLE_BUTTON: active = CUTTING; motion(x, y); break; case GLUT_RIGHT_BUTTON: /* move the polygon */ active = SLICES; motion(x, y); break; } }
// Connects to a NAO robot (default 192.168.11.9:9559, overridable via argv),
// starts the hand/voice-control and diagnostics modules, then loops forever
// alternating an open-hand/head-turn gesture with its reverse once a second.
// NOTE(review): `finish` is never set to true, so the loop only exits if an
// ALError is thrown -- confirm that is intended.
int main(int argc, char** argv) {
    std::string ip = "192.168.11.9";
    std::string port = "9559";
    if (argc > 1) ip = argv[1];
    if (argc > 2) port = argv[2];
    int portn = std::atoi(port.c_str());
    // Make sure no stale broker survives from a previous run.
    AL::ALBrokerManager::getInstance()->killAllBroker();
    AL::ALBroker::Ptr broker = AL::ALBroker::createBroker("main", "0.0.0.0", 54000, ip, portn);
    try {
        auto hvc = AL::ALModule::createModule<HandVoiceControl>(broker, "HandVoiceControl");
        hvc->startRecognition();
        AL::ALMotionProxy motion(broker);
        auto d = AL::ALModule::createModule<Diagnostics>(broker, "Diagnostics");
        d->reportStatus();
        static bool finish = false;
        while (!finish) {
            // Open the right hand and turn the head, hold for a second...
            motion.openHand("RHand");
            motion.setStiffnesses("HeadYaw", 0.1f);
            motion.angleInterpolation("HeadYaw", 0.2f, 1.0f, true);
            qi::os::msleep(1000);
            // ...then close the hand and return the head to center.
            motion.closeHand("RHand");
            motion.angleInterpolation("HeadYaw", 0.0f, 1.0f, true);
            qi::os::msleep(1000);
        }
        hvc->stopRecognition();
    } catch (const AL::ALError& e) {
        qiLogError("error") << e.what() << std::endl;
    }
    //AL::ALMotionProxy motion(ip, portn);
    //int a = motion.post.openHand("RHand");
    //tts.callVoid("say", std::string("opening"));
    //int b = motion.post.closeHand("RHand");
    //tts.callVoid("say", std::string("closing"));
    return 0;
}
/* Forward a drag event to the trackball only while the left button is the
 * active one; right (and any other) button drags are ignored. */
void Trackball::drag(int x, int y)
{
    if (btn == GLUT_LEFT_BUTTON)
        motion(x, y);
}
void mouse(int button, int state, int x, int y) { /* hack for 2 button mouse */ if (button == GLUT_LEFT_BUTTON && glutGetModifiers() & GLUT_ACTIVE_SHIFT) button = GLUT_MIDDLE_BUTTON; if(state == GLUT_DOWN) { switch(button) { case GLUT_LEFT_BUTTON: mot = PAN; motion(ox = x, oy = y); break; case GLUT_RIGHT_BUTTON: mot = ROT; motion(ox = x, oy = y); break; case GLUT_MIDDLE_BUTTON: break; } } else if (state == GLUT_UP) { mot = 0; } }
/* Left-button press gives the user control and records where the press
 * happened; release hands control back to the program. */
void mouse(int button, int state, int x, int y)
{
    if (button != GLUT_LEFT_BUTTON)
        return;

    if (state == GLUT_DOWN) {
        userInControl = 1;
        xOnButtonDown = x;
        yOnButtonDown = y;
        motion(x, y);   /* treat the press itself as the first drag event */
    } else if (state == GLUT_UP) {
        userInControl = 0;
    }
}
// glpuzzle mouse handler.  A left press aborts any auto-solve in progress,
// then tries to pick up the puzzle piece under the cursor; a left release
// drops the current selection.  The middle button only toggles its state
// flag (its effect is handled elsewhere).  The trailing motion() call keeps
// the drag state in sync with the press/release position.
void mouse(int b, int s, int x, int y) {
    float selx, sely;
    mousex = x;
    mousey = y;
    curX = x;
    curY = y;
    if (s == GLUT_DOWN) {
        switch (b) {
        case GLUT_LEFT_BUTTON:
            if (solving) {
                // Cancel the solver before letting the user take over.
                freeSolutions();
                solving = 0;
                glutChangeToMenuEntry(1, "Solving", 1);
                glutSetWindowTitle("glpuzzle");
                movingPiece = 0;
            }
            left_mouse = GL_TRUE;
            sel_piece = selectPiece(mousex, mousey);
            // Grab the piece only if the cursor maps onto valid coordinates.
            if (computeCoords(sel_piece, mousex, mousey, &selx, &sely)) {
                grabPiece(sel_piece, selx, sely);
            }
            glutPostRedisplay();
            break;
        case GLUT_MIDDLE_BUTTON:
            middle_mouse = GL_TRUE;
            glutPostRedisplay();
            break;
        }
    } else {
        switch (b) {
        case GLUT_LEFT_BUTTON:
            left_mouse = GL_FALSE;
            dropSelection();
            glutPostRedisplay();
            break;
        case GLUT_MIDDLE_BUTTON:
            middle_mouse = GL_FALSE;
            glutPostRedisplay();
            break;
        }
    }
    motion(x, y);
}
// Hit-animation selector: chooses a head-shot reaction motion when the hit
// struck a head bone and was initiated by the player-controlled entity.
// On success writes the chosen motion to `m` and the hit angle to `angle`.
bool predicate( CEntityAlive& ea, const SHit& H, MotionID &m, float &angle ) const {
    m = MotionID();  // default: no motion selected
    // Only hits from the entity the player currently controls count.
    if( H.initiator() != Level().CurrentControlEntity())
        return false;
    VERIFY( ea.Visual( ) );
    IKinematics *K = ea.Visual()->dcast_PKinematics();
    VERIFY( K );
    if( is_bone_head( *K, H.bone() ) ) {
        edirection dr = dir( ea, H, angle );  // hit direction relative to the body
        m = motion( dr );
        type_motion_diagnostic( " type_motion3: 4. ’едшот (по веро¤тности), кроме 5 (4)", dr, ea, H, m );
        return true;
    }
    return false;
}
// Hit-animation selector: chooses the "sniper round to the body" reaction
// when the hit came from the player, the weapon is a sniper rifle, and the
// struck bone is NOT the head.  Writes the motion to `m` and angle to `angle`.
bool predicate( CEntityAlive& ea, const SHit& H, MotionID &m, float &angle ) const {
    // Only hits from the player-controlled entity count.
    if( H.initiator() != Level().CurrentControlEntity())
        return false;
    m = MotionID();  // reset output before selection
    VERIFY( ea.Visual( ) );
    IKinematics *K = ea.Visual()->dcast_PKinematics();
    VERIFY( K );
    if(is_snipper( H.weaponID ) && !is_bone_head( *K, H.bone() )) {
        edirection dr = dir( ea, H, angle );  // hit direction relative to the body
        m = motion( dr );
        type_motion_diagnostic( "type_motion5: 6. —найперка в тело", dr, ea, H, m );
        return true;
    }
    return false;
}
// Runs the whiteboard scavenger-hunt task: starts the motion and vision
// worker threads, polls at 2 Hz until the vision side flags completion (or
// ROS shuts down), cancels the current navigation goal, and reports the
// captured image path.
// NOTE(review): `timeout` is unused, and both workers are detach()ed rather
// than joined, so they may outlive this call -- confirm that is intended.
void ScavTaskWhiteBoard::executeTask(int timeout, TaskResult &result, std::string &record) {
    boost::thread motion( &ScavTaskWhiteBoard::motionThread, this);
    boost::thread vision( &ScavTaskWhiteBoard::visionThread, this);
    ros::Rate r(2);
    while (ros::ok() and r.sleep()) {
        if (task_completed) {
            search_planner->cancelCurrentGoal();  // stop navigating once found
            break;
        }
    }
    motion.detach();
    vision.detach();
    record = wb_path_to_image;
    result = SUCCEEDED;
}
// Hit-animation selector for the "sprinting character shot in the head"
// case: chooses the forward-inertia motion only when ALL conditions below
// hold.  Writes the motion to `m` and the hit angle to `angle`.
bool predicate( CEntityAlive& ea, const SHit& H, MotionID &m, float &angle ) const {
    m = MotionID();
    // Only hits from the player-controlled entity count.
    if( H.initiator() != Level().CurrentControlEntity())
        return false;
    VERIFY( ea.Visual( ) );
    IKinematics *K = ea.Visual()->dcast_PKinematics();
    VERIFY( K );
    // Must be a head shot.
    if( !is_bone_head( *K, H.bone() ))
        return false;
    //CAI_Stalker* s = ea.cast_stalker ();
    // Biting-type creatures use a different reaction set.
    CCharacterPhysicsSupport* chs = ea.character_physics_support();
    if( !chs || chs->Type( ) == CCharacterPhysicsSupport::etBitting )
        return false;
    VERIFY( chs->movement() );
    // The victim must be moving fast (i.e. running)...
    const Fvector stalker_velocity = chs->movement()->GetVelocity();
    const float stalker_speed = stalker_velocity.magnitude();
    const float min_stalker_speed = 3.65f;
    if(stalker_speed < min_stalker_speed )
        return false;
    // ...roughly toward the shooter (within a 20-degree cone)...
    const Fvector stalker_velocity_dir = Fvector().mul( stalker_velocity, 1.f/stalker_speed );
    const Fvector dir_to_actor = Fvector().sub( H.initiator()->Position(), ea.Position() ).normalize_safe();
    const float front_angle_cos = _cos( deg2rad ( 20.f ) );
    if( stalker_velocity_dir.dotproduct(dir_to_actor) < front_angle_cos )
        return false;
    // ...hit from the front...
    if( type_motion::front != type_motion::dir( ea, H, angle ) )
        return false;
    // ...and the shooter is within 30 meters of the hit position.
    Fvector p;
    if( Fvector().sub( H.initiator()->Position(), global_hit_position( p, ea, H ) ).magnitude() > 30.f )
        return false;
    m = motion( front );
    type_motion_diagnostic( " type_motion0: 1. = »нерционное движение вперед от попадани¤ в голову ", front, ea, H, m );
    return true;
}
// Walks the robot toward `object`: computes the offset between the object
// and NAO (both in the robot frame, with hand-grasping corrections applied
// to the target), commands the move, then stands up and waits for the walk
// to finish.  Note the axis swap: Point2d values are built as (y, x) from
// the underlying position vectors.
void Locomotion:: walkToObject(Object& object, SpaceOrientation& spaceOrientation)
{
    std::cout << "1Object position: " << object.getPositionInRobotFrame() << std::endl;
    // std::cout << "1NAO position: " << spaceOrientation.getNaoPositionInRobotFrame() << std::endl;
    // turnToObject(object, spaceOrientation);
    // float distance = spaceOrientation.computeDistance(object);
    // std::cout << "computed distance" << distance << std::endl;
    cv::Point2d obj = object.getPositionInRobotFrame();
    // Offset the target so the hand (not the torso) ends up at the object.
    obj.x += LATERAL_DISTANCE_CORRECTION_FOR_HAND_GRASPING;
    obj.y += FORWARD_DISTANCE_CORRECTION_FOR_HAND_GRASPING;
    std::vector<float> naoInSpace = spaceOrientation.getNaoPositionInRobotFrame();
    cv::Point2d nao(naoInSpace[1], naoInSpace[0]);  // vector is [forward, lateral, theta]
    cv::Point2d distance = obj - nao;
    move(distance.y, distance.x, 0.0);  // move(forward, lateral, rotation)
#ifdef __linux__
    try
    {
        AL::ALRobotPostureProxy robotPosture(robotIp);
        AL::ALMotionProxy motion(robotIp);
        bool postureReached = robotPosture.goToPosture("Stand", 0.5f);
        motion.waitUntilMoveIsFinished();
        if (! postureReached)
            speech->say("Could not stand");
        std::cout << "NAO position after walk: " << spaceOrientation.getNaoPositionInRobotFrame() << std::endl;
    }
    catch (const AL::ALError& e)
    {
        std::cerr << "Caught exception: " << e.what() << std::endl;
        exit(1);
    }
#else
    std::cout << "walkToObject run not on linux" << std::endl;
#endif
}
int main(int argc, char **argv) { std::string robotIp = "127.0.0.1"; if (argc < 2) { std::cerr << "Usage: almotion_getMoveArmsEnable robotIp " << "(optional default \"127.0.0.1\")."<< std::endl; } else { robotIp = argv[1]; } AL::ALMotionProxy motion(robotIp); // Example showing how to get the enabled flags for the arms std::cout << "LeftArmEnabled: " << motion.getMoveArmsEnabled("LArm") << std::endl; std::cout << "RightArmEnabled: " << motion.getMoveArmsEnabled("RArm") << std::endl; std::cout << "BothArmsEnabled: " << motion.getMoveArmsEnabled("Arms") << std::endl; return 0; }
// Returns NAO's current pose relative to where initializePosition() was
// called, as a reference to the cached `naoPositionInSpace` vector.
// Reads odometry via ALMotionProxy, rotates it into the start frame, and
// subtracts the start pose.  On non-Linux builds the cached value is
// returned unchanged.  Exits the process on ALError.
std::vector<float>& SpaceOrientation:: getNaoPositionInRobotFrame()
{
#ifdef __linux__
    try
    {
        AL::ALMotionProxy motion(robotIp);
        bool useSensors = true;
        std::vector<float> sensorsPosition = motion.getRobotPosition(useSensors);
        // std::cout << "sensors: " << sensorsPosition << std::endl;
        // Odometry vector is indexed [0]=x, [1]=y, [2]=theta; the point is
        // deliberately built as (y, x) to match the project's convention.
        cv::Point2d position(sensorsPosition[1], sensorsPosition[0]);
        // Undo the robot's initial heading so axes match the start frame.
        position = rotatePoint(position, naoPositionOnStartInSpace[2]);
        // if (! naoPreviousPositionInSpace.size())
        // {
        // naoPreviousPositionInSpace.push_back(0);
        // naoPreviousPositionInSpace.push_back(0);
        // naoPreviousPositionInSpace.push_back(0);
        // }
        // std::cout << "Position after rotation: " <<
        // position << std::endl;
        naoPositionInSpace[0] = position.y - naoPositionOnStartInSpace[0];
        naoPositionInSpace[1] = position.x - naoPositionOnStartInSpace[1];
        // - naoPositionOnStartInSpace[1];
        naoPositionInSpace[2] = sensorsPosition[2] - naoPositionOnStartInSpace[2];
        // cv::Point2d naoPoint(naoPositionInSpace[1],
        // naoPositionInSpace[0]);
    }
    catch (const AL::ALError& e)
    {
        std::cerr << "Caught exception: " << e.what() << std::endl;
        exit(EXIT_FAILURE);
    }
#else
    std::cout << "getNaoPositionInRobotFrame() not on Linux." << std::endl;
#endif
    return naoPositionInSpace;
}
// Commands a relative walk of (x forward, y lateral, theta rotation) via
// ALNavigationProxy, optionally applying empirically-derived drift
// corrections first, and blocks until the move completes.  Only does
// anything on Linux builds.  Exits the process on ALError.
void Locomotion:: move(float x, float y, float theta)
{
#ifdef __linux__
    try
    {
        AL::ALMotionProxy motion(robotIp);
        motion.moveInit();  // bring the walk engine to a safe start pose
        AL::ALNavigationProxy navigationProxy(robotIp,9559);
        if (USE_CORRECTION)
        {
            std::cout << "x, y before correction: " << x << " " << y << std::endl;
            //Lateral correction: -3/20 = -0.15;
            //LATERAL_CORRECTION = 0.15;
            //=>
            //lateralDistance =
            // forwardDistance*LATERAL_CORRECTION_FOR_FORWARD_MOVEMENT;
            //forwardDistance *= FORWARD_CORRECTION;
            //lateralDistance *= LATERAL_CORRECTION;
            // Walking forward drifts sideways: compensate laterally in
            // proportion to the forward distance, then scale both axes.
            y += x*LATERAL_CORRECTION_FOR_FORWARD_MOVEMENT;
            y *= LATERAL_CORRECTION;
            x *= FORWARD_CORRECTION;
            std::cout << "x, y after correction: " << x << " " << y << std::endl;
        }
        bool positionReached = navigationProxy.moveTo(x, y, theta);
        //, AL::ALValue::array("MaxStepX", 0.06));
        motion.waitUntilMoveIsFinished();
        if (! positionReached)
            speech->say("Could not walk to the object");
    }
    catch (const AL::ALError& e)
    {
        std::cerr << "Caught exception: " << e.what() << std::endl;
        exit(1);
    }
#endif
}
// Public methods: std::vector<float> SpaceOrientation:: getBottomCameraPosition() { //Computed experimentally in position Stand: std::vector<float> position; position.push_back(0.0188118); position.push_back(0.0357351); position.push_back(0.481532); position.push_back(0); position.push_back(0); position.push_back(0); #ifdef __linux__ try { AL::ALMotionProxy motion(robotIp); // std::cout << motion.getSensorNames() << std::endl; // std::cout << motion.() << std::endl; bool useSensorValues = true; int space = 2; //FRAME_ROBOT position = motion.getPosition("CameraBottom", space, useSensorValues); std::cout << "Bottom camera: " << position << std::endl; // std::cout << "Bottom camera height angle: " << position[4]*180 / M_PI << std::endl; // std::cout << "Bottom camera width angle: " << position[3]*180 / M_PI << std::endl; // position.x = pos[1]; // position.y = pos[0]; // position.z = pos[2]; } catch (const AL::ALError& e) { std::cerr << "Caught exception: " << e.what() << std::endl; exit(EXIT_FAILURE); } #endif return position; }
// Captures NAO's current odometry pose as the origin of the working frame;
// later getNaoPositionInRobotFrame() calls report positions relative to it.
// On non-Linux builds all poses are zero-initialized instead.  Exits the
// process on ALError.
void SpaceOrientation:: initializePosition()
{
#ifdef __linux__
    try
    {
        AL::ALMotionProxy motion(robotIp);
        bool useSensors = true;
        naoPositionOnStartInSpace = motion.getRobotPosition(useSensors);
        // std::cout << "sensors: " << naoPositionOnStartInSpace << std::endl;
        // Build the start point as (y, x) -- the project's axis convention --
        // and rotate by the initial heading so later samples share the frame.
        cv::Point2d startPosition(naoPositionOnStartInSpace[1], naoPositionOnStartInSpace[0]);
        startPosition = rotatePoint(startPosition, naoPositionOnStartInSpace[2]);
        // std::cout << "rotation: " << rotatePoint(cv::Point2d(1,1), -M_PI/2);
        naoPositionOnStartInSpace[0] = startPosition.y;
        naoPositionOnStartInSpace[1] = startPosition.x;
        // std::cout << "Start position: " << startPosition << std::endl;
        naoPositionInSpace.push_back(0);
        naoPositionInSpace.push_back(0);
        naoPositionInSpace.push_back(0);
        naoPreviousPositionInSpace = naoPositionInSpace;
    }
    catch (const AL::ALError& e)
    {
        std::cerr << "Caught exception: " << e.what() << std::endl;
        exit(EXIT_FAILURE);
    }
#else
    std::cout << "initializePosition() not on Linux." << std::endl;
    naoPositionInSpace.push_back(0);
    naoPositionInSpace.push_back(0);
    naoPositionInSpace.push_back(0);
    naoPreviousPositionInSpace = naoPositionInSpace;
    naoPositionOnStartInSpace = naoPositionInSpace;
#endif
}
// Hit-animation selector: chooses the shotgun reaction motion when the
// player's weapon resolves to a shotgun and the shot came from within 20
// meters.  Writes the motion to `m` and the hit angle to `angle`.
bool predicate( CEntityAlive& ea, const SHit& H, MotionID &m, float &angle ) const {
    m = MotionID();
    // Only hits from the player-controlled entity count.
    if( H.initiator() != Level().CurrentControlEntity())
        return false;
    // Resolve the weapon object and require it to be a shotgun.
    CObject* O = Level().Objects.net_Find( H.weaponID );
    if(!O)
        return false;
    //static_cast<CGameObject*>(O)->cast_weapon()
    CWeaponShotgun* s = smart_cast< CWeaponShotgun* >( static_cast<CGameObject*>(O) );
    if(!s)
        return false;
    // Shotgun reactions only apply at close range.
    Fvector p;
    const float max_distance = 20.f;
    if(Fvector().sub(H.initiator()->Position(),global_hit_position( p, ea, H )).magnitude() > max_distance)
        return false;
    edirection dr = dir( ea, H, angle );  // hit direction relative to the body
    m = motion( dr );
    type_motion_diagnostic( " type_motion2: 3. Ўотган ", dr, ea, H, m );
    return true;
}
int main(int argc, char **argv) { std::string robotIp = "127.0.0.1"; if (argc < 2) { std::cerr << "Usage: almotion_getrobotposition robotIp " << "(optional default \"127.0.0.1\")."<< std::endl; } else { robotIp = argv[1]; } AL::ALMotionProxy motion(robotIp); // Example showing how to get a simplified robot position in world. bool useSensorValues = false; std::vector<float> result = motion.getRobotPosition(useSensorValues); std::cout << "Robot position is: " << result << std::endl; return 0; }