void ObjHero::actionWalk(Node* target, Point des)
{
    Point destination = getPointInMap(des);
    assert(rootObj);
    rootObj->stopAllActions();
    setState(EobjState::E_WALK);

    // Face the walk direction before moving.
    if ((destination.x < rootObj->getPosition().x && dir == EDIR_FORWARD) ||
        (destination.x > rootObj->getPosition().x && dir == EDIR_BACKWARD)) {
        turnAround();
    }

    Animation* walk = createAnimateWithFileNames("HeroRun%d.png", 11);
    walk->setDelayPerUnit(0.1f);
    auto action = Animate::create(walk);

    // Constant walking speed: duration = distance / speed.
    float dx = destination.x - rootObj->getPositionX();
    float dy = destination.y - rootObj->getPositionY();
    float distance = sqrtf(dx * dx + dy * dy);
    float dur = distance / speed;

    setDesPoin(des);
    auto moveTo = MoveTo::create(dur, destination);
    rootObj->runAction(RepeatForever::create(action));
    rootObj->runAction(Sequence::create(
        moveTo,
        CallFunc::create(CC_CALLBACK_0(ObjHero::actionStand, this)),
        NULL));
    //target->runAction(Follow::create(rootObj));
}
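// createAnimateWithFileNames() is not defined in this file. A minimal sketch of
// what it presumably does, assuming numbered frame files and the cocos2d-x 3.x
// Animation API (shown as a free helper; in the project it may be a member of a
// base class, and frame numbering is assumed to start at 1):
static Animation* createAnimateWithFileNames(const char* format, int frameCount)
{
    Animation* animation = Animation::create();
    char name[64];
    for (int i = 1; i <= frameCount; ++i) {
        snprintf(name, sizeof(name), format, i);  // e.g. "HeroRun1.png" ... "HeroRun11.png"
        animation->addSpriteFrameWithFile(name);
    }
    return animation;
}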
void lifeObj::moveAway(Rect rect)
{
    Point pt = getrootObj()->getPosition();
    Rect selfRec = getShadowRect();

    // Push this object out of `rect` on each axis, leaving half of the
    // shadow's extent as clearance.
    if (pt.x > rect.origin.x) {
        pt.x = rect.origin.x + rect.size.width + selfRec.size.width / 2.0f;
    } else if (pt.x < rect.origin.x) {
        pt.x = rect.origin.x - rect.size.width - selfRec.size.width / 2.0f;
    }
    if (pt.y > rect.origin.y) {
        pt.y = rect.origin.y + rect.size.height + selfRec.size.height / 2.0f;
    } else if (pt.y < rect.origin.y) {
        pt.y = rect.origin.y - rect.size.height - selfRec.size.height / 2.0f;
    }

    Point des = getPointInMap(pt);
    this->getrootObj()->setPosition(des);
    //CCLOG("%f %f", pt.x, pt.y);
    //CCLOG("%f %f", rect.origin.x, rect.origin.y);
}
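// Hedged usage sketch (assumption): moveAway() reads like a collision response,
// presumably invoked once an overlap with an obstacle's shadow rect is detected.
// The helper below is hypothetical, not part of the project:
static void resolveOverlap(lifeObj* self, lifeObj* obstacle)
{
    Rect obstacleRect = obstacle->getShadowRect();
    if (obstacleRect.intersectsRect(self->getShadowRect()))
        self->moveAway(obstacleRect);
}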
// Dash in the given direction. EDir is assumed here as the enum type behind the
// `dir` member and the EDIR_FORWARD/EDIR_BACKWARD constants; the gesture
// handler below passes forward or backward.
void ObjHero::actionRush(EDir rushDir)
{
    if (getState() == EobjState::E_RUSH)
        return;
    assert(rootObj);
    rootObj->stopAllActions();
    setState(EobjState::E_RUSH);

    // Dash 60 units in the requested direction over 0.3 s.
    int flag = (rushDir == EDIR_FORWARD) ? 1 : -1;
    int distance = 60;
    Point p = ccpAdd(rootObj->getPosition(), ccp(distance * flag, 0));

    Animation* rush = createAnimateWithFileNames("HeroAttackT%d.png", 3);
    rush->setDelayPerUnit(0.1f);
    auto rushAnim = Animate::create(rush);
    rootObj->runAction(rushAnim);

    MoveTo* moveTo = MoveTo::create(0.3f, getPointInMap(p));
    rootObj->runAction(Sequence::create(
        moveTo,
        CallFunc::create(CC_CALLBACK_0(ObjHero::actionStand, this)),
        NULL));
    attackEffect();
}
void LCBattleScene::ccTouchesEnded(CCSet *pTouches, CCEvent *pEvent)
{
    // A plain tap (no drag) walks the hero to the touched point.
    if (!moved) {
        CCTouch* touch = dynamic_cast<CCTouch*>(*pTouches->begin());
        CCPoint position = NodeHelper::getPositionToELFDesigner(hero->getrootObj(), touch->getLocation());
        //sendPosition(position);
        hero->actionWalk(NULL, getPointInMap(position));  // target node only feeds the commented-out Follow action
        return;
    }
    if (mPath.size() < 1) {
        return;
    }

    // A drag is treated as a gesture stroke and classified.
    RecognitionResult result = test->recognize(mPath);
    CCLog("%s %lf", result.name.c_str(), result.score);
    if (result.name == "V") {
        hero->actionJump();
    } else if (result.name == "LeftToRightLine") {
        hero->actionRush(forward);
    } else if (result.name == "RightToLeftLine") {
        hero->actionRush(backward);
    }
    mPath.clear();
}
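// Hedged sketch (assumption): neither `moved` nor `mPath` is set in this file.
// Presumably ccTouchesMoved accumulates the stroke for the recognizer, roughly
// like this (mPath assumed to be std::vector<CCPoint>):
void LCBattleScene::ccTouchesMoved(CCSet *pTouches, CCEvent *pEvent)
{
    CCTouch* touch = dynamic_cast<CCTouch*>(*pTouches->begin());
    mPath.push_back(touch->getLocation());  // record one point of the stroke
    moved = true;                           // distinguishes a drag from a plain tap
}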
void Kinect_Plugin::Main()
{
    std::cout << "Starting KINECTplugin" << std::endl;
    XnStatus nRetVal = XN_STATUS_OK;
    xn::EnumerationErrors errors;

    KNI_DEV->getInstance().Initialize();
    Human::getInstance().Initialize("../data/Kinect/Skeleton");
    //Gui::getInstance();
    //Tab *pluginTab;
    //Video *polo;
    //pluginTab = new Tab("Kinect_Plugin");
    //polo = new Video(10, 10, 640, 480, "polo", pluginTab);

    nRetVal = KNI_DEV->getInstance().openDeviceFromXml(SAMPLE_XML_PATH, errors);
    if (nRetVal == XN_STATUS_NO_NODE_PRESENT) {
        XnChar strError[1024];
        errors.ToString(strError, 1024);
        printf("%s\n", strError);
    } else if (nRetVal != XN_STATUS_OK) {
        printf("Open failed: %s\n", xnGetStatusString(nRetVal));
    } else {
        std::cout << "KINECTplugin started" << std::endl;
    }

    // Locate the production nodes declared in the XML and align depth to RGB.
    nRetVal = KNI_DEV->getInstance().g_Context.FindExistingNode(XN_NODE_TYPE_IMAGE, *KNI_DEV->getInstance().getImageGenerator());
    nRetVal = KNI_DEV->getInstance().g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, *KNI_DEV->getInstance().getDepthGenerator());
    KNI_DEV->getInstance().getDepthGenerator()->GetAlternativeViewPointCap().SetViewPoint(*KNI_DEV->getInstance().getImageGenerator());
    nRetVal = KNI_DEV->getInstance().g_Context.FindExistingNode(XN_NODE_TYPE_USER, *KNI_DEV->getInstance().getUserGenerator());
    //nRetVal = KNI_DEV->getInstance().g_Context.FindExistingNode(XN_NODE_TYPE_GESTURE, *KNI_DEV->getInstance().getGestureGenerator());
    nRetVal = KNI_DEV->getInstance().g_Context.FindExistingNode(XN_NODE_TYPE_HANDS, *KNI_DEV->getInstance().getHandsGenerator());
    //KNI_DEV->getInstance().g_Scene.GetMetaData(KNI_DEV->getInstance().g_SceneMD);

    // Skeleton tracking: register user, calibration, and pose callbacks.
    XnCallbackHandle hUserCallbacks, hCalibrationStart, hCalibrationComplete, hPoseDetected;
    if (!KNI_DEV->getInstance().g_User.IsCapabilitySupported(XN_CAPABILITY_SKELETON)) {
        printf("Supplied user generator doesn't support skeleton\n");
    }
    nRetVal = KNI_DEV->getInstance().g_User.RegisterUserCallbacks(User_NewUser, User_LostUser, NULL, hUserCallbacks);
    nRetVal = KNI_DEV->getInstance().g_User.GetSkeletonCap().RegisterToCalibrationStart(UserCalibration_CalibrationStart, NULL, hCalibrationStart);
    nRetVal = KNI_DEV->getInstance().g_User.GetSkeletonCap().RegisterToCalibrationComplete(UserCalibration_CalibrationComplete, NULL, hCalibrationComplete);
    nRetVal = KNI_DEV->getInstance().g_User.GetPoseDetectionCap().RegisterToPoseDetected(UserPose_PoseDetected, NULL, hPoseDetected);
    KNI_DEV->getInstance().g_User.GetSkeletonCap().GetCalibrationPose(g_strPose);
    KNI_DEV->getInstance().g_User.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);

    // NITE session manager for the "Wave" focus gesture.
    KNI_DEV->getInstance().pSessionGenerator = new XnVSessionManager();
    nRetVal = ((XnVSessionManager*)KNI_DEV->getInstance().pSessionGenerator)->Initialize(&KNI_DEV->getInstance().g_Context, "Wave", "RaiseHand");
    if (nRetVal != XN_STATUS_OK) {
        printf("Session Manager couldn't initialize: %s\n", xnGetStatusString(nRetVal));
        delete KNI_DEV->getInstance().pSessionGenerator;
    } else {
        printf("Session Manager initialized: %s\n", xnGetStatusString(nRetVal));
    }

    nRetVal = KNI_DEV->getInstance().g_Context.StartGeneratingAll();
    KNI_DEV->getInstance().pSessionGenerator->RegisterSession(NULL, &SessionStart, &SessionEnd, &SessionProgress);
    KNI_DEV->getInstance().wc.RegisterWave(NULL, OnWaveCB);
    KNI_DEV->getInstance().wc.RegisterPointUpdate(NULL, OnPointUpdate);
    KNI_DEV->getInstance().pSessionGenerator->AddListener(&KNI_DEV->getInstance().wc);

    XnUserID aUsers[MAX_NUM_USERS];
    XnUInt16 nUsers;
    XnSkeletonJointTransformation Joint[24];
    printf("Starting to run\n");
    if (g_bNeedPose) {
        printf("Assume calibration pose\n");
    }
    XnUInt32 epochTime = 0;
    IplImage* imgDepth16u = cvCreateImage(cvSize(640, 480), IPL_DEPTH_16U, 1);
    IplImage* imgRGB8u   = cvCreateImage(cvSize(1280, 1024), IPL_DEPTH_8U, 3);
    IplImage* depthinfo  = cvCreateImage(cvSize(640, 480), IPL_DEPTH_8U, 1);
    IplImage* imageinfo  = cvCreateImage(cvSize(1280, 1024), IPL_DEPTH_8U, 3);
    IplImage* sceneinfo  = cvCreateImage(cvSize(640, 480), IPL_DEPTH_8U, 3);
    XnUInt32 total_frames = 0;
    //string action = "payAttention";

    for (;;)
    {
        KNI_DEV->getInstance().readFrame();
        KNI_DEV->getInstance().g_Context.WaitAnyUpdateAll();
        ((XnVSessionManager*)KNI_DEV->getInstance().pSessionGenerator)->Update(&KNI_DEV->getInstance().g_Context);

        // Copy the raw 16-bit depth map and scale it to a viewable 8-bit image
        // (depth is reported in mm, up to ~4096), then publish both streams.
        memcpy(imgDepth16u->imageData, KNI_DEV->getInstance().getDepthMetaData()->Data(), 640 * 480 * 2);
        cvConvertScale(imgDepth16u, depthinfo, 255 / 4096.0, 0);
        memcpy(imgRGB8u->imageData, KNI_DEV->getInstance().getImageMetaData()->Data(), 1280 * 1024 * 3);
        cvCvtColor(imgRGB8u, imageinfo, CV_RGB2BGR);
        sharedMemory->getInstance().kinectInfo->set_depth(depthinfo);
        sharedMemory->getInstance().kinectInfo->set_RGB(imageinfo);

        if (sharedMemory->getInstance().getAction() == "computePoint")
        {
            cout << "Starting: " << sharedMemory->getInstance().getAction() << " STATE in Kinect" << endl;
            // Wait until another module publishes the pixel to convert.
            do {
                usleep(5000);
            } while (sharedMemory->getInstance().getObjectPositionX() == 0 || sharedMemory->getInstance().getObjectPositionY() == 0);
            std::cout << "about to compute using " << sharedMemory->getInstance().getObjectPositionX() << " " << sharedMemory->getInstance().getObjectPositionY() << std::endl;

            // Convert the image pixel to real-world coordinates in mm.
            double x, y, z;
            getdephtmm(sharedMemory->getInstance().getObjectPositionX(), sharedMemory->getInstance().getObjectPositionY(), x, y, z);
            sharedMemory->getInstance().setRealObjectPositionX(x);
            sharedMemory->getInstance().setRealObjectPositionY(y);
            sharedMemory->getInstance().setRealObjectPositionZ(z);
            std::cout << "point computed: " << sharedMemory->getInstance().getRealObjectPositionX() << " " << sharedMemory->getInstance().getRealObjectPositionY() << " " << sharedMemory->getInstance().getRealObjectPositionZ() << std::endl;
            sharedMemory->getInstance().setObjectPositionX(0);
            sharedMemory->getInstance().setObjectPositionY(0);
            sharedMemory->getInstance().setAction("graspObject");
            // If it returns an outlier:
            //TODO review: if the object really is at these coordinates, this could loop forever
            //if(sharedMemory->getInstance().getRealObjectPositionX()>750 || sharedMemory->getInstance().getRealObjectPositionZ()<200)
            //{
            //    sharedMemory->getInstance().setAction("recognizeObject");
            //}
            //else{
            //    sharedMemory->getInstance().setAction(cambiar_estado("punto_calculado", "si"));
            //}
        }

        if (sharedMemory->getInstance().getAction() == "payAttention")
        {
            // Start a 30 s window on the first pass through this state.
            if (firstTime) {
                t_ini = clock();
                firstTime = false;
                Human::getInstance().gesture_detected = false;
            }
            cout << "Starting: " << sharedMemory->getInstance().getAction() << " STATE in Kinect" << endl;
            if (Human::getInstance().gesture_detected)
            {
                cout << "**GESTURE DETECTED in payAttention" << endl;
                sharedMemory->getInstance().setGestureDepthPosition(Human::getInstance().gesture[2]);
                sharedMemory->getInstance().startDownToRotations = false;
                if (sharedMemory->getInstance().getTestRunning() == "Emergency") {
                    sharedMemory->getInstance().sintetizer.set_Phrase("Emergency Situation Detected");
                    // Save an image of the victim.
                    cv::imwrite("../data/EmergencyReport/imgPeersonHurt.png", cv::Mat(sharedMemory->getInstance().kinectInfo->get_RGB()));
                    // Mark the victim's location on the map.
                    cv::Mat mapa = cv::imread("../data/map.png");
                    cv::Scalar color = cv::Scalar(0, 0, 255);
                    std::cout << "sharedMemory->getInstance().getRobotPosition().get_X()=" << sharedMemory->getInstance().getRobotPosition().get_X() << std::endl;
                    std::cout << "sharedMemory->getInstance().getRobotPosition().get_Y()=" << sharedMemory->getInstance().getRobotPosition().get_Y() << std::endl;
                    // Map pixels: world mm / 50 mm-per-pixel, offset by the map origin.
                    std::cout << "PIXEL_X=" << (sharedMemory->getInstance().getRobotPosition().get_X() / 50) + ORIGIN_X << std::endl;
                    std::cout << "PIXEL_Y=" << (sharedMemory->getInstance().getRobotPosition().get_Y() / 50) + ORIGIN_Y << std::endl;
                    cv::circle(mapa, cv::Point((sharedMemory->getInstance().getRobotPosition().get_X() / 50) + ORIGIN_X, (sharedMemory->getInstance().getRobotPosition().get_Y() / 50) + ORIGIN_Y), 7, color, 10, 8, 0);
                    cv::imwrite("../data/EmergencyReport/locationPeersonHurt.png", mapa);
                    Location temp = getPointInMap(sharedMemory->getInstance().getRobotPosition());
                    sharedMemory->getInstance().lastObjective->setObjectivePosition(temp);
                    //sharedMemory->getInstance().setStringDestination("bedroom1");
                    //sharedMemory->getInstance().setAction("navigateCloseTo");
                    sharedMemory->getInstance().setAction("navigateToPoint");
                } else {
                    sharedMemory->getInstance().sintetizer.set_Phrase("I have seen a guest requesting");
                    sharedMemory->getInstance().startDownToRotations = false;
                    Location temp = getPointInMap(sharedMemory->getInstance().getRobotPosition());
                    sharedMemory->getInstance().lastObjective->setObjectivePosition(temp);
                    sharedMemory->getInstance().setAction("navigateToPoint");
                }
                firstTime = true;
            } else {
                t_fin = clock();
                //cout << "**NO Gesture in payAttention" << endl;
                firstTime = false;
                // clock() returns ticks, so compare elapsed seconds via CLOCKS_PER_SEC.
                // Note: clock() measures CPU time; a wall-clock source such as time(NULL)
                // may be more appropriate for a timeout.
                if ((t_fin - t_ini) / CLOCKS_PER_SEC > 30) {
                    cout << "**Time OVER" << endl;
                    firstTime = true;
                    sharedMemory->getInstance().setAction("turn");
                }
            }
        }

        if (sharedMemory->getInstance().getAction() == "Find_person")
        {
            cout << "Starting: " << sharedMemory->getInstance().getAction() << " STATE in Kinect" << endl;
            // Fetch the label map for user 2 (hard-coded user id). //TODO Error?
            KNI_DEV->getInstance().g_User.GetUserPixels(2, KNI_DEV->getInstance().g_SceneMD);
            const XnLabel* pLabels = KNI_DEV->getInstance().g_SceneMD.Data();
            cvSet(sceneinfo, cvScalar(0, 0, 0));
            // Paint every labeled (user) pixel white in the scene image.
            for (XnUInt y = 0; y < KNI_DEV->getInstance().g_DepthMD.YRes(); ++y) {
                for (XnUInt x = 0; x < KNI_DEV->getInstance().g_DepthMD.XRes(); ++x, ++pLabels) {
                    if (*pLabels) {
                        CV_IMAGE_ELEM(sceneinfo, uchar, y, (x * 3) + 0) = 255;
                        CV_IMAGE_ELEM(sceneinfo, uchar, y, (x * 3) + 1) = 255;
                        CV_IMAGE_ELEM(sceneinfo, uchar, y, (x * 3) + 2) = 255;
                    }
                    //if(*pLabels==2)
                    //    CV_IMAGE_ELEM(sceneinfo, uchar, y, (x*3)+1)=255;
                    //if(*pLabels==3)
                    //    CV_IMAGE_ELEM(sceneinfo, uchar, y, (x*3)+2)=255;
                }
            }
            //sharedMemory->getInstance().kinectInfo->set_user(sceneinfo);
            //cvShowImage("polo", sceneinfo);
            //cvWaitKey(100);
        }

        if (sharedMemory->getInstance().getAction() == "transformKinectToMapPoint")
        {
            cout << "Starting: " << sharedMemory->getInstance().getAction() << " STATE in Kinect" << endl;
            // Positions are assumed to be in mm.
            float robotx  = sharedMemory->getInstance().getRobotPosition().get_X();
            float roboty  = sharedMemory->getInstance().getRobotPosition().get_Y();
            float robotth = sharedMemory->getInstance().getRobotPosition().get_Angle() * DEG_TO_RAD;
            float humanx  = Human::getInstance().gesture[2];      // robot-frame X is the Kinect Z axis
            float humany  = Human::getInstance().gesture[0] * -1; // robot-frame Y is the Kinect X axis, sign flipped
            // Standard 2-D rigid transform from the robot frame to the map frame.
            float X = robotx + (humanx * cos(robotth) - humany * sin(robotth));
            float Y = roboty + (humanx * sin(robotth) + humany * cos(robotth));
            //TODO store X, Y in shared memory
        }

        if (sharedMemory->getInstance().getAction() == "follow")
        {
            cout << "Starting: " << sharedMemory->getInstance().getAction() << " STATE in Kinect" << endl;
            nUsers = MAX_NUM_USERS;
            KNI_DEV->getInstance().g_User.GetUsers(aUsers, nUsers);
            int numTracked = 0;
            int userToPrint = -1;
            XnPoint3D pt;
            for (XnUInt16 i = 0; i < nUsers; i++)
            {
                skeletonJoint Torso;
                if (KNI_DEV->getInstance().g_User.GetSkeletonCap().IsTracking(aUsers[i]) == FALSE)
                {
                    Human::getInstance().tracking = -1;
                    continue;
                }
                //patrick
                //g_UserGenerator.GetSkeletonCap().SetSmoothing(.1);
                std::map<std::string, skeletonJoint>::iterator iter;
                for (iter = Human::getInstance().Skeleton[i].begin(); iter != Human::getInstance().Skeleton[i].end(); ++iter)
                {
                    // Read the joint transform and cache its position and Euler orientation.
                    KNI_DEV->getInstance().g_User.GetSkeletonCap().GetSkeletonJoint(aUsers[i], (XnSkeletonJoint)iter->second.Get_ID(), Joint[iter->second.Get_ID()]);
                    iter->second.Set_pos(Joint[iter->second.Get_ID()].position.position.X,
                                         Joint[iter->second.Get_ID()].position.position.Y,
                                         Joint[iter->second.Get_ID()].position.position.Z);
                    rotate(Joint[iter->second.Get_ID()].orientation.orientation.elements);
                    iter->second.heading  = heading;
                    iter->second.attitude = attitude;
                    iter->second.bank     = bank;
                    //std::cout << heading << attitude << bank << std::endl;
                    if (iter->second.Get_name() == "TORSO")
                    {
                        Torso = iter->second;
                        // Follow this user only while the torso stays within +/-500 mm laterally.
                        if (Torso.Get_x() < -500 || Torso.Get_x() > 500) {
                            Human::getInstance().tracking = -1;
                        } else {
                            Human::getInstance().tracking = i;
                        }
                    }
                }
            }
        }
    }
    return;
}
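// getdephtmm() is not defined in this file. A minimal sketch of what it
// presumably does, assuming OpenNI's projective-to-real-world conversion
// (image pixel plus raw depth in mm -> camera-frame coordinates in mm):
void getdephtmm(int px, int py, double& x, double& y, double& z)
{
    xn::DepthGenerator* depth = KNI_DEV->getInstance().getDepthGenerator();
    xn::DepthMetaData depthMD;
    depth->GetMetaData(depthMD);

    XnPoint3D projective;
    projective.X = (XnFloat)px;
    projective.Y = (XnFloat)py;
    projective.Z = (XnFloat)depthMD(px, py);  // raw depth at the pixel, in mm

    XnPoint3D world;
    depth->ConvertProjectiveToRealWorld(1, &projective, &world);
    x = world.X;
    y = world.Y;
    z = world.Z;
}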
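// rotate() and the globals heading/attitude/bank are defined elsewhere. Given
// those variable names, a common implementation is the euclideanspace.com
// matrix-to-Euler routine applied to OpenNI's row-major 3x3 orientation matrix;
// a sketch under that assumption:
#include <cmath>
void rotate(float* m)  // m[0..8]: rows of the 3x3 rotation matrix
{
    if (m[3] > 0.998f) {          // singularity at north pole (m10)
        heading  = atan2(m[2], m[8]);
        attitude = M_PI / 2;
        bank     = 0;
    } else if (m[3] < -0.998f) {  // singularity at south pole
        heading  = atan2(m[2], m[8]);
        attitude = -M_PI / 2;
        bank     = 0;
    } else {
        heading  = atan2(-m[6], m[0]);  // rotation about the vertical axis
        bank     = atan2(-m[5], m[4]);
        attitude = asin(m[3]);
    }
}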
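// The user/calibration callbacks registered in Main() are also defined
// elsewhere; their OpenNI signatures follow the standard NiUserTracker sample.
// A sketch of the new-user handler under that assumption:
void XN_CALLBACK_TYPE User_NewUser(xn::UserGenerator& generator, XnUserID nId, void* pCookie)
{
    printf("New User %d\n", nId);
    // Request calibration (or pose detection first, when a pose is required).
    if (g_bNeedPose)
        generator.GetPoseDetectionCap().StartPoseDetection(g_strPose, nId);
    else
        generator.GetSkeletonCap().RequestCalibration(nId, TRUE);
}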