// Release every OpenNI production node and the texture buffer, then terminate.
// NOTE(review): terminates with status 1 even on a normal shutdown — this
// matches the original OpenNI sample convention; confirm callers don't rely
// on a zero exit code.
void CleanUpExit()
{
    recorder.Release();
    g_player.Release();
    g_image.Release();
    g_scriptNode.Release();
    g_context.Release();
    g_hands.Release();
    g_gesture.Release();

    // g_pTexMap was malloc'd elsewhere in this file; free it before exiting.
    free(g_pTexMap);
    exit(1);
}
// Callback invoked by OpenNI when a registered gesture is recognized.
// Stops listening for that gesture, starts hand tracking from the gesture's
// end position, and records which gesture fired in the globals
// `gesture` / `ready_state`.
void XN_CALLBACK_TYPE Gesture_Recognized(xn::GestureGenerator& generator,
                                         const XnChar* strGesture,
                                         const XnPoint3D* pIDPosition,
                                         const XnPoint3D* pEndPosition,
                                         void* pCookie)
{
    printf("Gesture recognized: %s\n", strGesture);

    // One-shot: remove the gesture and hand control over to the hand tracker.
    g_gesture.RemoveGesture(strGesture);
    g_hands.StartTracking(*pEndPosition);
    std::cout << strGesture << std::endl;

    if (strcmp(strGesture, "Wave") == 0)
    {
        gesture = 1;
        ready_state = true;
    }
    if (strcmp(strGesture, "Click") == 0)
    {
        gesture = 2;
        ready_state = false;
    }
    if (strcmp(strGesture, "RaiseHand") == 0)
    {
        gesture = 3;
    }

    std::cout << gesture << std::endl;
}
int main(int argc, char **argv) { lasttime = time(NULL); if(argc < 3) { printf("Usage : %s [address] [name] [port]\n", argv[0]); return EXIT_FAILURE; } //first send the application name sock = init_connection_module(argv[1],atoi(argv[3])); char buffer[BUF_SIZE]; //use of a descriptor in order to use non-blocking sockets fd_set rdfs; if(fcntl(sock, F_SETFL, O_NONBLOCK) < 0) printf("Error setting socket in non blocking mode\n"); else printf("Socket is in non blocking mode\n"); // send the Applcation's name write_server(sock, argv[2]); write_server(sock, createInitGestureXml().c_str()); XnStatus retVal = XN_STATUS_OK; //context creation Context context; //depth generator DepthGenerator depth; //for the led XN_USB_DEV_HANDLE usbHandle; bool foundUsb = false; const XnUSBConnectionString *paths; XnUInt32 count; //for tracking the user XnSkeletonJointPosition pos1, pos2; retVal = xnUSBInit(); //retVal = 0; if (retVal != XN_STATUS_OK) { xnPrintError(retVal, "xnUSBInit failed"); } else retVal = xnUSBEnumerateDevices(0x045E /* VendorID */, 0x02B0 /*ProductID*/, &paths, &count); if (retVal != XN_STATUS_OK) { xnPrintError(retVal, "xnUSBEnumerateDevices failed"); }else { retVal = xnUSBOpenDeviceByPath(paths[0], &usbHandle); foundUsb = true; } //sessiong manager - NITE XnVSessionManager* pSessionGenerator; //context init retVal = context.Init(); retVal = g_GestureGenerator.Create(context); retVal = g_HandsGenerator.Create(context); retVal = depth.Create(context); pSessionGenerator = new XnVSessionManager(); pSessionGenerator->Initialize(&context, "Wave", "RaiseHand"); //start generating data retVal = context.StartGeneratingAll(); /* session callbacks - START = when we detect focus or short focus gesture; STOP = when we loose track; PROGRESS = when we're interacting; */ pSessionGenerator->RegisterSession(NULL, &SessionStart, &SessionEnd, &SessionProgress); //swipe control XnVSwipeDetector sw; sw.RegisterSwipeLeft(NULL, swipeLeft); sw.RegisterSwipeRight(NULL, swipeRight); 
pSessionGenerator->AddListener(&sw); XnVPushDetector pw; pw.RegisterPush(NULL, push); pSessionGenerator->AddListener(&pw); XnVSteadyDetector st; st.RegisterSteady(NULL, steady); //pSessionGenerator->AddListener(&st); while(true) { //wait for data to be ready on depth node and update all nodes; retVal = context.WaitAnyUpdateAll(); if(retVal != XN_STATUS_OK) { cout << "failed updating data; reason: " << xnGetStatusString(retVal) << endl; continue; } pSessionGenerator->Update(&context); //network resetDescriptor(&sock, &rdfs); int n = read_server(sock, buffer); if(n > 0) printf("################## Received from server: %s ##############\n", buffer); } end_connection_module(sock); context.Release(); delete pSessionGenerator; system("PAUSE"); return EXIT_SUCCESS; }
int main(int argc, char* argv[]) { int nRetVal; XnStatus rc; EnumerationErrors errors; // get playback file if using if (argc > 2 && strcmp(argv[2], "true") == 0) { rc = g_context.Init(); rc = g_context.OpenFileRecording(RECORDING_PATH, g_player); CHECK_RC(rc, "Opening file"); rc = g_player.SetRepeat(TRUE); CHECK_RC(rc, "Turn repeat off"); } else { // get context from xml rc = g_context.InitFromXmlFile(SAMPLE_XML_PATH, g_scriptNode, &errors); } // error checking if (rc == XN_STATUS_NO_NODE_PRESENT) { XnChar strError[1024]; errors.ToString(strError, 1024); printf("%s\n", strError); return (rc); } CHECK_RC(rc, "Context initialization"); // get hand and image generator from context, check errors rc = g_context.FindExistingNode(XN_NODE_TYPE_IMAGE, g_image); CHECK_RC(rc, "Get image generator"); rc = g_context.FindExistingNode(XN_NODE_TYPE_HANDS, g_hands); CHECK_RC(rc, "Get hand generator"); rc = g_context.FindExistingNode(XN_NODE_TYPE_GESTURE, g_gesture); CHECK_RC(rc, "Get gesture generator"); // create and register callbacks XnCallbackHandle h1, h2; g_gesture.RegisterGestureCallbacks(Gesture_Recognized, Gesture_Process, NULL, h1); CHECK_RC(rc, "Get register gesture callback"); g_hands.RegisterHandCallbacks(Hand_Create, Hand_Update, Hand_Destroy, NULL, h2); CHECK_RC(rc, "Get hand callback"); // add gestures to the generator rc = g_gesture.AddGesture("Click", NULL); CHECK_RC(rc, " add click gesture"); rc = g_gesture.AddGesture("RaiseHand", NULL); CHECK_RC(rc, "add raise gesture"); rc = g_gesture.AddGesture("Wave", NULL); CHECK_RC(rc, "add wave gesture"); g_image.GetMetaData(g_imageMD); // RGB is the only image format supported. 
if (g_imageMD.PixelFormat() != XN_PIXEL_FORMAT_RGB24) { printf("The device image format must be RGB24\n"); return 1; } // if argument is set true, then record the session if (argc > 1 && strcmp(argv[1], "true") == 0) { std::cout << "recording to " << RECORDING_PATH << std::endl; // Create Recorder rc = recorder.Create(g_context); CHECK_RC(rc, "create recorder"); // Init it rc = recorder.SetDestination(XN_RECORD_MEDIUM_FILE, RECORDING_PATH); CHECK_RC(rc, "init recorder"); // Add nodes to recording rc = recorder.AddNodeToRecording(g_image); CHECK_RC(rc, "add image node"); rc = recorder.AddNodeToRecording(g_hands); CHECK_RC(rc, "add hands node"); } // initialize and run program glutInit(&argc, argv); // GLUT initialization glutInitDisplayMode(GLUT_RGB | GLUT_DOUBLE | GLUT_DEPTH ); // Display Mode glutInitWindowSize(WIDTH, HEIGHT); // set window size glutInitWindowPosition(GL_WIN_POSITION_X, GL_WIN_POSITION_Y); glutCreateWindow(TITLE); // create Window glutDisplayFunc(glutDisplay); // register Display Function glutIdleFunc(glutDisplay); // register Idle Function glutKeyboardFunc(glutKeyboard ); // register Keyboard Handler initialize(); glutMainLoop(); CleanUpExit(); return 0; }
int main(int argc, char *argv[]) { //--------------------------------------------------------------------// //------------------------- SETUP REQUIRED NODES ---------------------// //--------------------------------------------------------------------// // Setup the command line parameters. setupParams(argc, argv); // Setup all the sockets. setupSockets(); // Setup the capture socket server for Mac. #if (XN_PLATFORM == XN_PLATFORM_MACOSX) if(_featureDepthMapCapture || _featureRGBCapture) { if(_useSockets) { g_AS3Network = network(); g_AS3Network.init(setupServer); } } #endif // Setup the status. XnStatus _status = XN_STATUS_OK; EnumerationErrors _errors; // Context Init and Add license. _status = _context.Init(); CHECK_RC(_status, "AS3OpenNI :: Initialize context"); _context.SetGlobalMirror(_mirror); XnChar vendor[XN_MAX_NAME_LENGTH]; XnChar license[XN_MAX_LICENSE_LENGTH]; _license.strVendor[XN_MAX_NAME_LENGTH] = strcmp(vendor, "PrimeSense"); _license.strKey[XN_MAX_LICENSE_LENGTH] = strcmp(license, "0KOIk2JeIBYClPWVnMoRKn5cdY4="); _status = _context.AddLicense(_license); CHECK_RC(_status, "AS3OpenNI :: Added license"); // Set it to VGA maps at 30 FPS _depthMode.nXRes = 640; _depthMode.nYRes = 480; _depthMode.nFPS = 30; // Depth map create. _status = _depth.Create(_context); CHECK_RC(_status, "AS3OpenNI :: Create depth generator"); _status = _depth.SetMapOutputMode(_depthMode); // Depth map create. _status = _image.Create(_context); CHECK_RC(_status, "AS3OpenNI :: Create image generator"); _status = _image.SetMapOutputMode(_depthMode); _status = _image.SetPixelFormat(XN_PIXEL_FORMAT_RGB24); // Create the hands generator. _status = _hands.Create(_context); CHECK_RC(_status, "AS3OpenNI :: Create hands generator"); _hands.SetSmoothing(0.1); // Create the gesture generator. _status = _gesture.Create(_context); CHECK_RC(_status, "AS3OpenNI :: Create gesture generator"); // Create user generator. 
_status = _userGenerator.Create(_context); CHECK_RC(_status, "AS3OpenNI :: Find user generator"); // Create and initialize point tracker _sessionManager = new XnVSessionManager(); _status = _sessionManager->Initialize(&_context, "Wave", "RaiseHand"); if (_status != XN_STATUS_OK) { printf("AS3OpenNI :: Couldn't initialize the Session Manager: %s\n", xnGetStatusString(_status)); CleanupExit(); } _sessionManager->RegisterSession(NULL, &SessionStart, &SessionEnd, &SessionProgress); // Start catching signals for quit indications CatchSignals(&_quit); //---------------------------------------------------------------// //------------------------- SETUP FEATURES ---------------------// //--------------------------------------------------------------// // Define the Wave and SinglePoint detectors. _waveDetector = new XnVWaveDetector(); // SinglePoint detector. if(_featureSinglePoint) _waveDetector->RegisterPointUpdate(NULL, &OnPointUpdate); // Feature Gesture. if(_featureGesture) { // Wave detector. _waveDetector->RegisterWave(NULL, &OnWave); // Push detector. _pushDetector = new XnVPushDetector(); _pushDetector->RegisterPush(NULL, &onPush); // Swipe detector. _swipeDetector = new XnVSwipeDetector(); _swipeDetector->RegisterSwipeUp(NULL, &Swipe_SwipeUp); _swipeDetector->RegisterSwipeDown(NULL, &Swipe_SwipeDown); _swipeDetector->RegisterSwipeLeft(NULL, &Swipe_SwipeLeft); _swipeDetector->RegisterSwipeRight(NULL, &Swipe_SwipeRight); // Steady detector. _steadyDetector = new XnVSteadyDetector(); _steadyDetector->RegisterSteady(NULL, &Steady_OnSteady); } // Feature Circle. if(_featureCircle) { // Circle detector. _circleDetector = new XnVCircleDetector(); _circleDetector->RegisterCircle(NULL, &CircleCB); _circleDetector->RegisterNoCircle(NULL, &NoCircleCB); _circleDetector->RegisterPrimaryPointCreate(NULL, &Circle_PrimaryCreate); _circleDetector->RegisterPrimaryPointDestroy(NULL, &Circle_PrimaryDestroy); } // Feature Slider. if(_featureSlider) { // Left/Right slider. 
_leftRightSlider = new XnVSelectableSlider1D(3, 0, AXIS_X); _leftRightSlider->RegisterActivate(NULL, &LeftRightSlider_OnActivate); _leftRightSlider->RegisterDeactivate(NULL, &LeftRightSlider_OnDeactivate); _leftRightSlider->RegisterPrimaryPointCreate(NULL, &LeftRightSlider_OnPrimaryCreate); _leftRightSlider->RegisterPrimaryPointDestroy(NULL, &LeftRightSlider_OnPrimaryDestroy); _leftRightSlider->RegisterValueChange(NULL, &LeftRightSlider_OnValueChange); _leftRightSlider->SetValueChangeOnOffAxis(false); // Up/Down slider. _upDownSlider = new XnVSelectableSlider1D(3, 0, AXIS_Y); _upDownSlider->RegisterActivate(NULL, &UpDownSlider_OnActivate); _upDownSlider->RegisterDeactivate(NULL, &UpDownSlider_OnDeactivate); _upDownSlider->RegisterPrimaryPointCreate(NULL, &UpDownSlider_OnPrimaryCreate); _upDownSlider->RegisterPrimaryPointDestroy(NULL, &UpDownSlider_OnPrimaryDestroy); _upDownSlider->RegisterValueChange(NULL, &UpDownSlider_OnValueChange); _upDownSlider->SetValueChangeOnOffAxis(false); // In/Out slider. _inOutSlider = new XnVSelectableSlider1D(3, 0, AXIS_Z); _inOutSlider->RegisterActivate(NULL, &InOutSlider_OnActivate); _inOutSlider->RegisterDeactivate(NULL, &InOutSlider_OnDeactivate); _inOutSlider->RegisterPrimaryPointCreate(NULL, &InOutSlider_OnPrimaryCreate); _inOutSlider->RegisterPrimaryPointDestroy(NULL, &InOutSlider_OnPrimaryDestroy); _inOutSlider->RegisterValueChange(NULL, &InOutSlider_OnValueChange); _inOutSlider->SetValueChangeOnOffAxis(false); } // Feature TrackPad. if(_featureTrackPad) { // Track Pad. if(trackpad_columns > 0 && trackpad_rows > 0) { _trackPad = new XnVSelectableSlider2D(trackpad_columns, trackpad_rows); } else { _trackPad = new XnVSelectableSlider2D(4, 9); } _trackPad->RegisterItemHover(NULL, &TrackPad_ItemHover); _trackPad->RegisterItemSelect(NULL, &TrackPad_ItemSelect); _trackPad->RegisterPrimaryPointCreate(NULL, &TrackPad_PrimaryCreate); _trackPad->RegisterPrimaryPointDestroy(NULL, &TrackPad_PrimaryDestroy); } // Feature User Tracking. 
if(_featureUserTracking) { // Setup user generator callbacks. XnCallbackHandle hUserCallbacks, hCalibrationCallbacks, hPoseCallbacks; if (!_userGenerator.IsCapabilitySupported(XN_CAPABILITY_SKELETON)) { printf("AS3OpenNI :: Supplied user generator doesn't support skeleton\n"); return 1; } _userGenerator.RegisterUserCallbacks(User_NewUser, User_LostUser, NULL, hUserCallbacks); // Setup Skeleton detection. _userGenerator.GetSkeletonCap().RegisterCalibrationCallbacks(UserCalibration_CalibrationStart, UserCalibration_CalibrationEnd, NULL, hCalibrationCallbacks); if (_userGenerator.GetSkeletonCap().NeedPoseForCalibration()) { _needPose = true; if (!_userGenerator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION)) { printf("AS3OpenNI :: Pose required, but not supported\n"); return 1; } _userGenerator.GetPoseDetectionCap().RegisterToPoseCallbacks(UserPose_PoseDetected, NULL, NULL, hPoseCallbacks); _userGenerator.GetSkeletonCap().GetCalibrationPose(_strPose); } _userGenerator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL); } // Create the broadcaster manager. _broadcaster = new XnVBroadcaster(); // Start generating all. _context.StartGeneratingAll(); // Set the frame rate. _status = xnFPSInit(&xnFPS, 180); CHECK_RC(_status, "AS3OpenNI :: FPS Init"); //----------------------------------------------------------------------// //------------------------- SETUP DISPLAY SUPPORT ---------------------// //--------------------------------------------------------------------// // Setup depth and image data. _depth.GetMetaData(_depthData); _image.GetMetaData(_imageData); // Hybrid mode isn't supported in this sample if (_imageData.FullXRes() != _depthData.FullXRes() || _imageData.FullYRes() != _depthData.FullYRes()) { printf ("AS3OpenNI :: The device depth and image resolution must be equal!\n"); return 1; } // RGB is the only image format supported. 
if (_imageData.PixelFormat() != XN_PIXEL_FORMAT_RGB24) { printf("AS3OpenNI :: The device image format must be RGB24\n"); return 1; } // Setup the view points to match between the depth and image maps. if(_snapPixels) _depth.GetAlternativeViewPointCap().SetViewPoint(_image); //-------------------------------------------------------------// //------------------------- MAIN LOOP ------------------------// //-----------------------------------------------------------// // Setup the capture socket server for PC. #if (XN_PLATFORM == XN_PLATFORM_WIN32) if(_featureDepthMapCapture || _featureRGBCapture || _featureUserTracking) { if(_useSockets) { g_AS3Network = network(); g_AS3Network.init(setupServer); } } #endif // Main loop while ((!_kbhit()) && (!_quit)) { xnFPSMarkFrame(&xnFPS); _context.WaitAndUpdateAll(); _sessionManager->Update(&_context); if(_featureDepthMapCapture) captureDepthMap(g_ucDepthBuffer); if(_featureRGBCapture) captureRGB(g_ucImageBuffer); #if (XN_PLATFORM == XN_PLATFORM_WIN32) if(_featureUserTracking) getPlayers(); #else if(_featureUserTracking) renderSkeleton(); #endif } CleanupExit(); }