// Set up OpenNI to obtain images from the Kinect's RGB camera
// (configured for RGB24; the 8-bit grayscale mode is left commented out below).
int kinectInit(void)
{
    XnStatus nRetVal = XN_STATUS_OK;
    ScriptNode scriptNode;
    EnumerationErrors errors;

    printf("Reading config from: '%s'\n", SAMPLE_XML_PATH_LOCAL);
    nRetVal = context.InitFromXmlFile(SAMPLE_XML_PATH_LOCAL, scriptNode, &errors);

    nRetVal = context.FindExistingNode(XN_NODE_TYPE_IMAGE, g_image);
    //g_image.SetPixelFormat(XN_PIXEL_FORMAT_GRAYSCALE_8_BIT);
    g_image.SetPixelFormat(XN_PIXEL_FORMAT_RGB24);
    g_image.GetMetaData(g_imageMD);

    nRetVal = context.FindExistingNode(XN_NODE_TYPE_DEPTH, depth);
    depth.GetMetaData(depthMD);

    // nRetVal = depth.GetAlternativeViewPointCap().SetViewPoint(g_image);
    //nRetVal = depth.GetFrameSyncCap().FrameSyncWith(g_image);

    return nRetVal;
}
int main(int argc, char *argv[])
{
    //--------------------------------------------------------------------//
    //------------------------- SETUP REQUIRED NODES ---------------------//
    //--------------------------------------------------------------------//

    // Setup the command line parameters.
    setupParams(argc, argv);

    // Setup all the sockets.
    setupSockets();

    // Setup the capture socket server for Mac.
    #if (XN_PLATFORM == XN_PLATFORM_MACOSX)
        if(_featureDepthMapCapture || _featureRGBCapture)
        {
            if(_useSockets)
            {
                g_AS3Network = network();
                g_AS3Network.init(setupServer);
            }
        }
    #endif

    // Setup the status.
    XnStatus _status = XN_STATUS_OK;
    EnumerationErrors _errors;

    // Context Init and Add license.
    _status = _context.Init();
    CHECK_RC(_status, "AS3OpenNI :: Initialize context");
    _context.SetGlobalMirror(_mirror);

    // Fill in the PrimeSense vendor name and key, then register the license.
    strcpy(_license.strVendor, "PrimeSense");
    strcpy(_license.strKey, "0KOIk2JeIBYClPWVnMoRKn5cdY4=");

    _status = _context.AddLicense(_license);
    CHECK_RC(_status, "AS3OpenNI :: Added license");

    // Set it to VGA maps at 30 FPS.
    _depthMode.nXRes = 640;
    _depthMode.nYRes = 480;
    _depthMode.nFPS = 30;

    // Depth map create.
    _status = _depth.Create(_context);
    CHECK_RC(_status, "AS3OpenNI :: Create depth generator");
    _status = _depth.SetMapOutputMode(_depthMode);

    // Image map create.
    _status = _image.Create(_context);
    CHECK_RC(_status, "AS3OpenNI :: Create image generator");
    _status = _image.SetMapOutputMode(_depthMode);
    _status = _image.SetPixelFormat(XN_PIXEL_FORMAT_RGB24);

    // Create the hands generator.
    _status = _hands.Create(_context);
    CHECK_RC(_status, "AS3OpenNI :: Create hands generator");
    _hands.SetSmoothing(0.1);

    // Create the gesture generator.
    _status = _gesture.Create(_context);
    CHECK_RC(_status, "AS3OpenNI :: Create gesture generator");

    // Create user generator.
    _status = _userGenerator.Create(_context);
    CHECK_RC(_status, "AS3OpenNI :: Find user generator");

    // Create and initialize the point tracker.
    _sessionManager = new XnVSessionManager();
    _status = _sessionManager->Initialize(&_context, "Wave", "RaiseHand");
    if (_status != XN_STATUS_OK)
    {
        printf("AS3OpenNI :: Couldn't initialize the Session Manager: %s\n", xnGetStatusString(_status));
        CleanupExit();
    }
    _sessionManager->RegisterSession(NULL, &SessionStart, &SessionEnd, &SessionProgress);

    // Start catching signals for quit indications.
    CatchSignals(&_quit);

    //---------------------------------------------------------------//
    //------------------------- SETUP FEATURES ---------------------//
    //--------------------------------------------------------------//

    // Define the Wave and SinglePoint detectors.
    _waveDetector = new XnVWaveDetector();

    // SinglePoint detector.
    if(_featureSinglePoint) _waveDetector->RegisterPointUpdate(NULL, &OnPointUpdate);

    // Feature Gesture.
    if(_featureGesture)
    {
        // Wave detector.
        _waveDetector->RegisterWave(NULL, &OnWave);

        // Push detector.
        _pushDetector = new XnVPushDetector();
        _pushDetector->RegisterPush(NULL, &onPush);

        // Swipe detector.
        _swipeDetector = new XnVSwipeDetector();
        _swipeDetector->RegisterSwipeUp(NULL, &Swipe_SwipeUp);
        _swipeDetector->RegisterSwipeDown(NULL, &Swipe_SwipeDown);
        _swipeDetector->RegisterSwipeLeft(NULL, &Swipe_SwipeLeft);
        _swipeDetector->RegisterSwipeRight(NULL, &Swipe_SwipeRight);

        // Steady detector.
        _steadyDetector = new XnVSteadyDetector();
        _steadyDetector->RegisterSteady(NULL, &Steady_OnSteady);
    }

    // Feature Circle.
    if(_featureCircle)
    {
        // Circle detector.
        _circleDetector = new XnVCircleDetector();
        _circleDetector->RegisterCircle(NULL, &CircleCB);
        _circleDetector->RegisterNoCircle(NULL, &NoCircleCB);
        _circleDetector->RegisterPrimaryPointCreate(NULL, &Circle_PrimaryCreate);
        _circleDetector->RegisterPrimaryPointDestroy(NULL, &Circle_PrimaryDestroy);
    }

    // Feature Slider.
    if(_featureSlider)
    {
        // Left/Right slider.
        _leftRightSlider = new XnVSelectableSlider1D(3, 0, AXIS_X);
        _leftRightSlider->RegisterActivate(NULL, &LeftRightSlider_OnActivate);
        _leftRightSlider->RegisterDeactivate(NULL, &LeftRightSlider_OnDeactivate);
        _leftRightSlider->RegisterPrimaryPointCreate(NULL, &LeftRightSlider_OnPrimaryCreate);
        _leftRightSlider->RegisterPrimaryPointDestroy(NULL, &LeftRightSlider_OnPrimaryDestroy);
        _leftRightSlider->RegisterValueChange(NULL, &LeftRightSlider_OnValueChange);
        _leftRightSlider->SetValueChangeOnOffAxis(false);

        // Up/Down slider.
        _upDownSlider = new XnVSelectableSlider1D(3, 0, AXIS_Y);
        _upDownSlider->RegisterActivate(NULL, &UpDownSlider_OnActivate);
        _upDownSlider->RegisterDeactivate(NULL, &UpDownSlider_OnDeactivate);
        _upDownSlider->RegisterPrimaryPointCreate(NULL, &UpDownSlider_OnPrimaryCreate);
        _upDownSlider->RegisterPrimaryPointDestroy(NULL, &UpDownSlider_OnPrimaryDestroy);
        _upDownSlider->RegisterValueChange(NULL, &UpDownSlider_OnValueChange);
        _upDownSlider->SetValueChangeOnOffAxis(false);

        // In/Out slider.
        _inOutSlider = new XnVSelectableSlider1D(3, 0, AXIS_Z);
        _inOutSlider->RegisterActivate(NULL, &InOutSlider_OnActivate);
        _inOutSlider->RegisterDeactivate(NULL, &InOutSlider_OnDeactivate);
        _inOutSlider->RegisterPrimaryPointCreate(NULL, &InOutSlider_OnPrimaryCreate);
        _inOutSlider->RegisterPrimaryPointDestroy(NULL, &InOutSlider_OnPrimaryDestroy);
        _inOutSlider->RegisterValueChange(NULL, &InOutSlider_OnValueChange);
        _inOutSlider->SetValueChangeOnOffAxis(false);
    }

    // Feature TrackPad.
    if(_featureTrackPad)
    {
        // Track Pad.
        if(trackpad_columns > 0 && trackpad_rows > 0)
        {
            _trackPad = new XnVSelectableSlider2D(trackpad_columns, trackpad_rows);
        }
        else
        {
            _trackPad = new XnVSelectableSlider2D(4, 9);
        }

        _trackPad->RegisterItemHover(NULL, &TrackPad_ItemHover);
        _trackPad->RegisterItemSelect(NULL, &TrackPad_ItemSelect);
        _trackPad->RegisterPrimaryPointCreate(NULL, &TrackPad_PrimaryCreate);
        _trackPad->RegisterPrimaryPointDestroy(NULL, &TrackPad_PrimaryDestroy);
    }

    // Feature User Tracking.
    if(_featureUserTracking)
    {
        // Setup user generator callbacks.
        XnCallbackHandle hUserCallbacks, hCalibrationCallbacks, hPoseCallbacks;
        if (!_userGenerator.IsCapabilitySupported(XN_CAPABILITY_SKELETON))
        {
            printf("AS3OpenNI :: Supplied user generator doesn't support skeleton\n");
            return 1;
        }
        _userGenerator.RegisterUserCallbacks(User_NewUser, User_LostUser, NULL, hUserCallbacks);

        // Setup Skeleton detection.
        _userGenerator.GetSkeletonCap().RegisterCalibrationCallbacks(UserCalibration_CalibrationStart, UserCalibration_CalibrationEnd, NULL, hCalibrationCallbacks);
        if (_userGenerator.GetSkeletonCap().NeedPoseForCalibration())
        {
            _needPose = true;
            if (!_userGenerator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION))
            {
                printf("AS3OpenNI :: Pose required, but not supported\n");
                return 1;
            }
            _userGenerator.GetPoseDetectionCap().RegisterToPoseCallbacks(UserPose_PoseDetected, NULL, NULL, hPoseCallbacks);
            _userGenerator.GetSkeletonCap().GetCalibrationPose(_strPose);
        }
        _userGenerator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);
    }

    // Create the broadcaster manager.
    _broadcaster = new XnVBroadcaster();

    // Start generating all.
    _context.StartGeneratingAll();

    // Set the frame rate.
    _status = xnFPSInit(&xnFPS, 180);
    CHECK_RC(_status, "AS3OpenNI :: FPS Init");

    //----------------------------------------------------------------------//
    //------------------------- SETUP DISPLAY SUPPORT ---------------------//
    //--------------------------------------------------------------------//

    // Setup depth and image data.
    _depth.GetMetaData(_depthData);
    _image.GetMetaData(_imageData);

    // Hybrid mode isn't supported in this sample.
    if (_imageData.FullXRes() != _depthData.FullXRes() || _imageData.FullYRes() != _depthData.FullYRes())
    {
        printf("AS3OpenNI :: The device depth and image resolution must be equal!\n");
        return 1;
    }

    // RGB is the only image format supported.
    if (_imageData.PixelFormat() != XN_PIXEL_FORMAT_RGB24)
    {
        printf("AS3OpenNI :: The device image format must be RGB24\n");
        return 1;
    }

    // Setup the view points to match between the depth and image maps.
    if(_snapPixels) _depth.GetAlternativeViewPointCap().SetViewPoint(_image);

    //-------------------------------------------------------------//
    //------------------------- MAIN LOOP ------------------------//
    //-----------------------------------------------------------//

    // Setup the capture socket server for PC.
    #if (XN_PLATFORM == XN_PLATFORM_WIN32)
        if(_featureDepthMapCapture || _featureRGBCapture || _featureUserTracking)
        {
            if(_useSockets)
            {
                g_AS3Network = network();
                g_AS3Network.init(setupServer);
            }
        }
    #endif

    // Main loop.
    while ((!_kbhit()) && (!_quit))
    {
        xnFPSMarkFrame(&xnFPS);
        _context.WaitAndUpdateAll();
        _sessionManager->Update(&_context);

        if(_featureDepthMapCapture) captureDepthMap(g_ucDepthBuffer);
        if(_featureRGBCapture) captureRGB(g_ucImageBuffer);

        #if (XN_PLATFORM == XN_PLATFORM_WIN32)
            if(_featureUserTracking) getPlayers();
        #else
            if(_featureUserTracking) renderSkeleton();
        #endif
    }

    CleanupExit();
}