Example #1
void OniTrackApp::setup()
{
	bRemoting = FALSE;
	
	XnStatus rc = context.InitFromXmlFile(SAMPLE_XML_FILE);
	if (rc != XN_STATUS_OK)
	{
		printf("Couldn't initialize: %s\n", xnGetStatusString(rc));
		shutdown();
	}
	
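	// Create the NITE session manager: "Click" is the focus gesture that starts a session,
	// "RaiseHand" is the quick-refocus gesture.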
	pSessionGenerator = new XnVSessionManager();
	rc = ((XnVSessionManager*)pSessionGenerator)->Initialize(&context, "Click", "RaiseHand");
	if (rc != XN_STATUS_OK)
	{
		printf("Session Manager couldn't initialize: %s\n", xnGetStatusString(rc));
		delete pSessionGenerator;
		shutdown();
	}
	
	
	context.StartGeneratingAll();
	
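	// Register for session lifecycle notifications (start, end, focus progress).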
	pSessionGenerator->RegisterSession(NULL, &SessionStart, &SessionEnd, &SessionProgress);
	
	
	wc.RegisterWave(NULL, OnWaveCB);
	wc.RegisterPointUpdate(NULL, OnPointUpdate);
	pSessionGenerator->AddListener(&wc);
	
	printf("Please perform focus gesture to start session\n");
	printf("Hit any key to exit\n");
	
}
int main(int argc, char **argv)
{
	XnStatus nRetVal = XN_STATUS_OK;

	if (argc > 1)
	{
		nRetVal = g_Context.Init();
		CHECK_RC(nRetVal, "Init");
		nRetVal = g_Context.OpenFileRecording(argv[1]);
		if (nRetVal != XN_STATUS_OK)
		{
			printf("Can't open recording %s: %s\n", argv[1], xnGetStatusString(nRetVal));
			return 1;
		}
	}
	else
	{
		nRetVal = g_Context.InitFromXmlFile(SAMPLE_XML_PATH);
		CHECK_RC(nRetVal, "InitFromXml");
	}
	g_tunnel = new Tunnel();

	nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_DepthGenerator);
	CHECK_RC(nRetVal, "Find depth generator");
	nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_USER, g_UserGenerator);
	if (nRetVal != XN_STATUS_OK)
	{
		nRetVal = g_UserGenerator.Create(g_Context);
		CHECK_RC(nRetVal, "Find user generator");
	}

	XnCallbackHandle hUserCallbacks, hCalibrationCallbacks, hPoseCallbacks;
	if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_SKELETON))
	{
		printf("Supplied user generator doesn't support skeleton\n");
		return 1;
	}
	g_UserGenerator.RegisterUserCallbacks(User_NewUser, User_LostUser, NULL, hUserCallbacks);
	g_UserGenerator.GetSkeletonCap().RegisterCalibrationCallbacks(UserCalibration_CalibrationStart, UserCalibration_CalibrationEnd, NULL, hCalibrationCallbacks);

	if (g_UserGenerator.GetSkeletonCap().NeedPoseForCalibration())
	{
		g_bNeedPose = TRUE;
		if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION))
		{
			printf("Pose required, but not supported\n");
			return 1;
		}
		g_UserGenerator.GetPoseDetectionCap().RegisterToPoseCallbacks(UserPose_PoseDetected, NULL, NULL, hPoseCallbacks);
		g_UserGenerator.GetSkeletonCap().GetCalibrationPose(g_strPose);
	}

	g_UserGenerator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);

	nRetVal = g_Context.StartGeneratingAll();
	CHECK_RC(nRetVal, "StartGenerating");

	glInit(&argc, argv);
	glutMainLoop();
}
Example #3
void AllocateAllGlobals()
{
  // Configure
  XnStatus rc = g_Context.InitFromXmlFile(SAMPLE_XML_FILE, g_ScriptNode);
  if (rc != XN_STATUS_OK)
    {
      printf("Error initializing: %s\n", xnGetStatusString(rc));
      CleanupExit();
    }
  
  if(NULL == g_pSessionManager)
    {
      g_pSessionManager = new XnVSessionManager();
    }
  if(NULL == g_pSessionManager)
    {
      printf("Couldn't create PointTracker!! (out of memory)\n");
      CleanupExit();
    }
  
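  // Lazily create the 2D trackpad control with g_TP_XDim x g_TP_YDim selectable cells.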
  if(NULL == g_pTrackPad)
    {
      g_pTrackPad = new XnVSelectableSlider2D(g_TP_XDim, g_TP_YDim);
    }
  if(NULL == g_pTrackPad)
    {
      printf("Couldn't create TrackPad!! (out of memory)\n");
      CleanupExit();
    }
  
  g_Context.StartGeneratingAll();
}
Example #4
    // Initialize OpenNI
    void initOpenNI( const std::string& recordFileName )
    {
        // Initialize the context
        XnStatus nRetVal = context.Init();
        if (nRetVal != XN_STATUS_OK) {
            throw std::runtime_error(xnGetStatusString(nRetVal));
        }

        // Open the recorded file
        nRetVal = context.OpenFileRecording( recordFileName.c_str() );
        if (nRetVal != XN_STATUS_OK) {
            throw std::runtime_error(xnGetStatusString(nRetVal));
        }

        // Get the player node
        nRetVal = context.FindExistingNode(XN_NODE_TYPE_PLAYER, player);
        if (nRetVal != XN_STATUS_OK) {
            throw std::runtime_error(xnGetStatusString(nRetVal));
        }

        // Create the audio device
        nRetVal = audio.Create(context);
        if (nRetVal != XN_STATUS_OK) {
            throw std::runtime_error(xnGetStatusString(nRetVal));
        }

        // Get the WAVE output mode
        nRetVal = audio.GetWaveOutputMode(waveMode);
        if (nRetVal != XN_STATUS_OK) {
            throw std::runtime_error(xnGetStatusString(nRetVal));
        }

        // Start generating data
        context.StartGeneratingAll();
    }
void initialize()
{
	std::cout << endl << "Initializing Kinect... " << endl << endl;

	g_RetVal = g_Context.Init();

	g_RetVal = g_DepthGenerator.Create(g_Context);
	if (g_RetVal != XN_STATUS_OK)
		printf("Failed creating DEPTH generator %s\n", xnGetStatusString(g_RetVal));

	XnMapOutputMode outputMode;
	outputMode.nXRes = g_im_w;
	outputMode.nYRes = g_im_h;
	outputMode.nFPS = g_fps;
	g_RetVal = g_DepthGenerator.SetMapOutputMode(outputMode);
	if (g_RetVal != XN_STATUS_OK)
		printf("Failed setting the DEPTH output mode %s\n", xnGetStatusString(g_RetVal));

	g_RetVal = g_Context.StartGeneratingAll();
	if (g_RetVal != XN_STATUS_OK)
		printf("Failed starting generating all %s\n", xnGetStatusString(g_RetVal));

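	// Read the camera intrinsics: ZPD is the zero-plane distance (focal length) and ZPPS the
	// pixel size at the zero plane; the pixel size is doubled to match the halved output resolution.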
	g_DepthGenerator.GetIntProperty ("ZPD", g_focal_length);
	g_DepthGenerator.GetRealProperty ("ZPPS", g_pixel_size);
	g_pixel_size *= 2.f;

	g_im3D.create(g_im_h,g_im_w,CV_32FC3);
}
// this sample can run either as a regular sample, or as a client for multi-process (remote mode)
int main(int argc, char** argv)
{
  xn::Context context;
  xn::ScriptNode scriptNode;
  XnVSessionGenerator* pSessionGenerator;
  XnBool bRemoting = FALSE;

  XnStatus rc = XN_STATUS_OK;
  xn::EnumerationErrors errors;

  rc = g_Context.InitFromXmlFile(SAMPLE_XML_FILE, g_ScripeNode, &errors);
  //CHECK_ERRORS(rc, errors, "InitFromXmlFile");
  //CHECK_RC(rc, "InitFromXml");

  rc = g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_DepthGenerator);
  //CHECK_RC(rc, "Find depth generator");
  rc = g_Context.FindExistingNode(XN_NODE_TYPE_SCENE, g_SceneAnalyzer);
  //CHECK_RC(rc, "Find scene analyzer");

  rc = g_Context.StartGeneratingAll();
  //CHECK_RC(rc, "StartGenerating");

  // Main loop
  while (!xnOSWasKeyboardHit())
  {
  }

  return 0;
}
bool SetupPrimesense(void)
{
	XnStatus nRetVal = XN_STATUS_OK;
	xn::ScriptNode scriptNode;
	xn::EnumerationErrors errors;

	nRetVal = g_context.Init();
	//nRetVal = g_context.InitFromXmlFile("./config.xml", scriptNode, &errors); 
	if (nRetVal == XN_STATUS_NO_NODE_PRESENT)
	{
		XnChar strError[1024];
		errors.ToString(strError, 1024);
		printf("%s\n", strError);
		return FALSE;
	}
	else if (nRetVal != XN_STATUS_OK)
	{
		printf("Open failed: %s\n", xnGetStatusString(nRetVal));
		return FALSE;
	}

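	// Set up the depth stream (image and IR setup are available but left disabled here).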
	SetupDepth(g_context);
	//SetupImage(g_context);
	//SetupIR(g_context);

	if ((nRetVal = g_context.StartGeneratingAll()) != XN_STATUS_OK)
	{
		fprintf(stderr,"Could not start: %s\n", xnGetStatusString(nRetVal));
		return FALSE;
	}
	return TRUE;
}
Example #8
int main(int argc, char ** argv)
{
	XnStatus rc = XN_STATUS_OK;

	// Initialize OpenNI
	rc = g_Context.InitFromXmlFile(SAMPLE_XML_PATH);
	CHECK_RC(rc, "InitFromXmlFile");

	rc = g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_DepthGenerator);
	CHECK_RC(rc, "Find depth generator");
	rc = g_Context.FindExistingNode(XN_NODE_TYPE_HANDS, g_HandsGenerator);
	CHECK_RC(rc, "Find hands generator");

	// Create NITE objects
	g_pSessionManager = new XnVSessionManager;
	rc = g_pSessionManager->Initialize(&g_Context, "Click,Wave", "RaiseHand");
	CHECK_RC(rc, "SessionManager::Initialize");

	g_pSessionManager->RegisterSession(NULL, SessionStarting, SessionEnding, NULL);

	g_pDrawer = new XnVPointDrawer(20, g_DepthGenerator);
	g_pFlowRouter = new XnVFlowRouter;
	g_pFlowRouter->SetActive(g_pDrawer);
	g_pSessionManager->AddListener(g_pFlowRouter);

	g_pDrawer->RegisterNoPoints(NULL, NoHands);
	g_pDrawer->SetDepthMap(g_bDrawDepthMap);

	// init & register circle control
	XnVCircleDetector circle;
	circle.RegisterCircle(NULL, OnCircleCB);
	circle.RegisterNoCircle(NULL, OnNoCircleCB);
	g_pSessionManager->AddListener(&circle);

	// init & register swipe control
	XnVSwipeDetector swipe;
	swipe.RegisterSwipeUp(NULL, OnSwipeUpCB);
	swipe.RegisterSwipeDown(NULL, OnSwipeDownCB);
	swipe.RegisterSwipeLeft(NULL, OnSwipeLeftCB);
	swipe.RegisterSwipeRight(NULL, OnSwipeRightCB);
	g_pSessionManager->AddListener(&swipe);

	// init & register wave control
	XnVWaveDetector wave;
	wave.RegisterWave(NULL, OnWaveCB);
	g_pSessionManager->AddListener(&wave);

	// Initialization done. Start generating
	rc = g_Context.StartGeneratingAll();
	CHECK_RC(rc, "StartGenerating");

	// Mainloop
	glInit(&argc, argv);
	glutMainLoop();
}
Example #9
bool DataCapture::startDataCapture()
{
    XnStatus rc = context_.StartGeneratingAll();
    if( rc != XN_STATUS_OK )
    {
        std::cout << "StartGeneratingAll: " << xnGetStatusString(rc) << std::endl;
        return false;
    }

    return true;
}
Example #10
	information()
	{
		RC(context.Init(),							"Context Initialized");
		
		XnMapOutputMode mode;
		mode.nXRes = XN_VGA_X_RES;
		mode.nYRes = XN_VGA_Y_RES;
		mode.nFPS  = 30;
		
		RC(image.Create(context),					"Create image buffer");
		RC(image.SetMapOutputMode(mode),			"Set image mode");
		
		RC(depth.Create(context),					"Create depth buffer");
		RC(depth.SetMapOutputMode(mode),			"Set depth mode");
		
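		// Request skeleton support, then reuse an existing user generator or create one if lookup fails.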
		xn::Query q;
		RC(q.AddSupportedCapability(XN_CAPABILITY_SKELETON), "Request skeleton");

		try {
			RC(context.FindExistingNode(XN_NODE_TYPE_USER, user), "User generator");
		} catch (...) {
			RC(user.Create(context),					"Get skeleton!!!");
		}
//		RC(user.Create(context, &q),					"Get skeleton!!!");
//		
//		xn::NodeInfoList il;
//		RC(context.EnumerateProductionTrees(XN_NODE_TYPE_USER, &q, il, NULL),
//													"Enumerate nodes");
//		
//		xn::NodeInfo i = *il.Begin();
//		RC(context.CreateProductionTree(i),			"Create skeleton node");
//		RC(i.GetInstance(user),						"Get skeleton");
		
		user.RegisterUserCallbacks(User_NewUser, NULL, NULL, hUserCallbacks);
		user.GetSkeletonCap().RegisterCalibrationCallbacks(UserCalibration_CalibrationStart, UserCalibration_CalibrationEnd, &user, hCalibrationCallbacks);
		
		if (user.GetSkeletonCap().NeedPoseForCalibration())
		{
			if (!user.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION))
			{
				post("Pose required, but not supported\n");
			}
			else
			{
				user.GetPoseDetectionCap().RegisterToPoseCallbacks(UserPose_PoseDetected, NULL, &user, hPoseCallbacks);
				user.GetSkeletonCap().GetCalibrationPose(g_strPose);
				user.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);
			}
		}
		
		RC(context.StartGeneratingAll(),			"Start generating data");
		
		post("Kinect initialized!\n");
	}
Example #11
int main(int argc, char *argv[])
{
  if(argc==2){
    gl_init(&argc, argv);

    xn_init(argv[1]);
    gContext.StartGeneratingAll();
    glutMainLoop();
  }

  return 0;
}
Example #12
/**
	* @fn initKinect
	* Starts the Kinect.
    * This is where the Kinect is started and its connection is checked.
    * The state variables are set and a session is started for the Kinect.
    * Motion capture begins and the movement flow is set up.
    * Startup finishes once the Kinect registers a gesture.
*/
int initKinect()
{
	XnStatus rc = XN_STATUS_OK;
	xn::EnumerationErrors errors;

	rc = g_Context.InitFromXmlFile(SAMPLE_XML_FILE, g_ScriptNode, &errors);
	CHECK_ERRORS(rc, errors, KNT_Msgs[KNT_INIT_FROM_XML_FILE]);
	CHECK_RC(rc, KNT_Msgs[KNT_INIT_FROM_XML]);

	rc = g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_DepthGenerator);
	CHECK_RC(rc, KNT_Msgs[KNT_FIND_DEPTH_GEN]);

	// Create and initialize point tracker
	g_pSessionManager = new XnVSessionManager;
	rc = g_pSessionManager->Initialize(&g_Context, "Wave", "RaiseHand");
	if (rc != XN_STATUS_OK)
	{
		printf(KNT_Msgs[KNT_COULD_NOT_INIT_SESSION], xnGetStatusString(rc));
		delete g_pSessionManager;
		return rc;
	}

	g_pSessionManager->RegisterSession(NULL, &SessionStart, &SessionEnd);

	// Start catching signals for quit indications
	CatchSignals(&g_bQuit);

	// Create and initialize the main slider
	g_pMainSlider = new XnVSelectableSlider1D(3);
	g_pMainSlider->RegisterItemSelect(NULL, &MainSlider_OnSelect);
	g_pMainSlider->RegisterActivate(NULL, &MainSlider_OnActivate);
	g_pMainSlider->RegisterDeactivate(NULL, &MainSlider_OnDeactivate);
	g_pMainSlider->RegisterPrimaryPointCreate(NULL, &MainSlider_OnPrimaryCreate);
	g_pMainSlider->RegisterPrimaryPointDestroy(NULL, &MainSlider_OnPrimaryDestroy);
	g_pMainSlider->RegisterValueChange(NULL, &MainSlider_OnValueChange);
	g_pMainSlider->SetValueChangeOnOffAxis(true);

	// Create the flow manager
	g_pMainFlowRouter = new XnVFlowRouter;

	// Connect flow manager to the point tracker
	g_pSessionManager->AddListener(g_pMainFlowRouter);

	g_Context.StartGeneratingAll();

	g_init_kinect = true;

	printf(KNT_Msgs[KNT_WAVE_GESTURE]);
	printf(KNT_Msgs[KNT_HIT_ANY_2_EXIT]); 

	return 0;
}
Example #13
    // Initialize OpenNI
    void initOpenNI()
    {
        // Initialize the context
        XnStatus nRetVal = context.Init();
        if (nRetVal != XN_STATUS_OK) {
            throw std::runtime_error(xnGetStatusString(nRetVal));
        }

        // Create the audio device (creating it from XML reports that no device is present)
        nRetVal = audio.Create(context);
        if (nRetVal != XN_STATUS_OK) {
            throw std::runtime_error(xnGetStatusString(nRetVal));
        }

        // Configure the WAVE format to capture
        waveMode.nSampleRate = 44100;
        waveMode.nChannels = 2;
        waveMode.nBitsPerSample = 16;
        nRetVal = audio.SetWaveOutputMode(waveMode);
        if (nRetVal != XN_STATUS_OK) {
            throw std::runtime_error(xnGetStatusString(nRetVal));
        }

        // Create the recorder
        nRetVal = recorder.Create( context );
        if (nRetVal != XN_STATUS_OK) {
            throw std::runtime_error(xnGetStatusString(nRetVal));
        }

        // Set the output destination
        nRetVal = recorder.SetDestination( XN_RECORD_MEDIUM_FILE, RECORDE_PATH );
        if (nRetVal != XN_STATUS_OK) {
            throw std::runtime_error(xnGetStatusString(nRetVal));
        }

        // Add the node to be recorded
        nRetVal = recorder.AddNodeToRecording(audio, XN_CODEC_UNCOMPRESSED);
        if (nRetVal != XN_STATUS_OK) {
            throw std::runtime_error(xnGetStatusString(nRetVal));
        }

        // Start recording
        nRetVal = recorder.Record();
        if (nRetVal != XN_STATUS_OK) {
            throw std::runtime_error(xnGetStatusString(nRetVal));
        }

        // Start generating data
        context.StartGeneratingAll();
    }
Example #14
bool OpenNIVideo::start() 
{ 
	XnStatus nRetVal = XN_STATUS_OK;

	nRetVal = context.StartGeneratingAll();
	CHECK_RC(nRetVal, "start generators");

	nRetVal = xnFPSInit(&xnFPS, 180);
	CHECK_RC(nRetVal, "FPS Init");

	_videoStreamList[0]->play();	

	return true;
}
int main(int argc, char **argv)
{
	XnStatus rc = XN_STATUS_OK;
	xn::EnumerationErrors errors;

	rc = g_Context.InitFromXmlFile(SAMPLE_XML_PATH);
	CHECK_ERRORS(rc, errors, "InitFromXmlFile");
	CHECK_RC(rc, "InitFromXml");

	rc = g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_DepthGenerator);
	CHECK_RC(rc, "Find depth generator");
	rc = g_Context.FindExistingNode(XN_NODE_TYPE_SCENE, g_SceneAnalyzer);
	CHECK_RC(rc, "Find scene analyzer");

	rc = g_Context.StartGeneratingAll();
	CHECK_RC(rc, "StartGenerating");

	#ifdef USE_GLUT

	glInit(&argc, argv);
	glutMainLoop();

	#elif defined(USE_GLES)

	if (!opengles_init(GL_WIN_SIZE_X, GL_WIN_SIZE_Y, &display, &surface, &context))
	{
		printf("Error initing opengles\n");
		CleanupExit();
	}

	glDisable(GL_DEPTH_TEST);
//	glEnable(GL_TEXTURE_2D);
	glEnableClientState(GL_VERTEX_ARRAY);
	glDisableClientState(GL_COLOR_ARRAY);

	while ((!_kbhit()) && (!g_bQuit))
	{
		glutDisplay();
		eglSwapBuffers(display, surface);
	}

	opengles_shutdown(display, surface, context);

	CleanupExit();

	#endif
}
Example #16
bool kinect_reader2::init() {
	nRetVal = XN_STATUS_OK;

	nRetVal = g_Context.Init();
	if (nRetVal != XN_STATUS_OK) {
		printf("Creating context failed: %s\n", xnGetStatusString(nRetVal));
		return false;
	}

	nRetVal = g_DepthGenerator.Create(g_Context);
	if(nRetVal != XN_STATUS_OK) {
		printf("Creating depth generator failed: %s\n", xnGetStatusString(nRetVal));
		return false;
	}
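	// Mirror the depth stream so user movements match the on-screen view.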
	g_DepthGenerator.GetMirrorCap().SetMirror(true);

	nRetVal = g_UserGenerator.Create(g_Context);
	if(nRetVal != XN_STATUS_OK) {
		printf("Creating user generator failed: %s\n", xnGetStatusString(nRetVal));
		return false;
	}

	XnCallbackHandle hUserCallbacks, hCalibrationStart, hCalibrationComplete, hPoseDetected;
	if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_SKELETON))
	{
		printf("Supplied user generator doesn't support skeleton\n");
		return false;
	}
	g_UserGenerator.RegisterUserCallbacks(User_NewUser, User_LostUser, NULL, hUserCallbacks);
	g_UserGenerator.GetSkeletonCap().RegisterToCalibrationStart(UserCalibration_CalibrationStart, NULL, hCalibrationStart);
	g_UserGenerator.GetSkeletonCap().RegisterToCalibrationComplete(UserCalibration_CalibrationComplete, NULL, hCalibrationComplete);

	if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION))
	{
		printf("Pose required, but not supported\n");
		return false;
	}
	g_UserGenerator.GetPoseDetectionCap().RegisterToPoseDetected(UserPose_PoseDetected, NULL, hPoseDetected);

	g_UserGenerator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);

	g_Context.StartGeneratingAll();

	return true;
}
Example #17
int configKinect(){
		
	XnStatus rc = XN_STATUS_OK;
	xn::EnumerationErrors errors;

	// Initialize OpenNI
	rc = g_Context.InitFromXmlFile(SAMPLE_XML_PATH, g_ScriptNode, &errors);
	CHECK_ERRORS(rc, errors, "InitFromXmlFile");
	CHECK_RC(rc, "InitFromXmlFile");

	rc = g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_DepthGenerator);
	CHECK_RC(rc, "Find depth generator");
	rc = g_Context.FindExistingNode(XN_NODE_TYPE_GESTURE, g_GestureGenerator);
	CHECK_RC(rc, "Find gesture generator");

	XnCallbackHandle hGestureIntermediateStageCompleted, hGestureProgress, hGestureReadyForNextIntermediateStage;
	g_GestureGenerator.RegisterToGestureIntermediateStageCompleted(GestureIntermediateStageCompletedHandler, NULL, hGestureIntermediateStageCompleted);
	g_GestureGenerator.RegisterToGestureReadyForNextIntermediateStage(GestureReadyForNextIntermediateStageHandler, NULL, hGestureReadyForNextIntermediateStage);
	g_GestureGenerator.RegisterGestureCallbacks(NULL, GestureProgressHandler, NULL, hGestureProgress);

	// Create NITE objects
	g_pSessionManager = new XnVSessionManager;
	rc = g_pSessionManager->Initialize(&g_Context, "Click,Wave", "RaiseHand");
	CHECK_RC(rc, "SessionManager::Initialize");

	g_pSessionManager->RegisterSession(NULL, SessionStarting, SessionEnding, FocusProgress);

	pointHandler = new PointHandler(20, g_DepthGenerator); 
	g_pFlowRouter = new XnVFlowRouter;
	g_pFlowRouter->SetActive(pointHandler);

	g_pSessionManager->AddListener(g_pFlowRouter);

	pointHandler->RegisterNoPoints(NULL, NoHands);

	// Initialization done. Start generating
	rc = g_Context.StartGeneratingAll();
	CHECK_RC(rc, "StartGenerating");

	return rc;
}
Example #18
/** The first command-line argument is the bus id of the device, the second is the device id.
    Both are optional.
*/
int main(int argc, char **argv)
{
    XnStatus nRetVal = XN_STATUS_OK;

    base64_init();
    parseUsbDevice(argc, argv);
    initEnvironment(g_deviceSerial);
    SceneDrawerInit();
    initFromContextFile();
    registerUserCallbacks();
    nRetVal = g_Context.StartGeneratingAll();

    CHECK_RC(nRetVal, "StartGenerating");

#if USE_MEMCACHE
    initializeKestrelConnection();
#endif

    openglMainLoop(argc, argv);

    return 0;
}
int main(int argc, char **argv) {
  ros::init(argc, argv, "scene_tracker");
  ros::NodeHandle nh;

  string configFilename = ros::package::getPath("openni_tracker") + "/openni_tracker.xml";
  XnStatus nRetVal = g_Context.InitFromXmlFile(configFilename.c_str());
  CHECK_RC(nRetVal, "InitFromXml");

  nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_DepthGenerator);
  CHECK_RC(nRetVal, "Find depth generator");

	nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_USER, g_UserGenerator);
	if (nRetVal != XN_STATUS_OK) {
		nRetVal = g_UserGenerator.Create(g_Context);
		CHECK_RC(nRetVal, "Find user generator");
	}

  XnCallbackHandle hUserCallbacks;
	g_UserGenerator.RegisterUserCallbacks(User_NewUser, User_LostUser, NULL, hUserCallbacks);

	nRetVal = g_Context.StartGeneratingAll();
	CHECK_RC(nRetVal, "StartGenerating");

	ros::Rate r(30);

	ros::NodeHandle pnh("~");
	string frame_id("openni_depth_frame");
	pnh.getParam("camera_frame_id", frame_id);

	while (ros::ok()) {
		g_Context.WaitAndUpdateAll();
		publishTransforms(frame_id);
		r.sleep();
	}

	g_Context.Shutdown();
	return 0;
}
int main(int argc, char **argv)
{
    XnStatus nRetVal = XN_STATUS_OK;
    xn::EnumerationErrors errors;
    TotalFrames = 0;
    static struct rusage ru;
    long double t;
    double w;
    struct timeval timStart;
    struct timeval timEnd;
    struct timeval Wall;
    bool Sample = true;
    XnFPSData xnFPS;
    XnUserID aUsers[MAX_NUM_USERS];
	XnUInt16 nUsers;
	XnSkeletonJointTransformation torsoJoint;

    if (argc > 1)
	{

    	//parse the cmd line
    	if (strcasecmp(argv[1],"--help") == 0)
    	{
    		PrintHelpHeader(argv[0]);
    		return 1;
    	}
    	numOfUser = atoi(argv[1]);
    	if(numOfUser == 0)
    	{
    		PrintHelpHeader(argv[0]);
    		return 1;
    	}
    	else if(numOfUser > 2)
    	{
    		printf("Maximal Users allowed is 2\n");
    		return 1;
    	}

	}
    else
    {
    	numOfUser = 4;
    }

    const char *fn = NULL;
    if    (fileExists(SAMPLE_XML_PATH)) fn = SAMPLE_XML_PATH;
    else if (fileExists(SAMPLE_XML_PATH_LOCAL)) fn = SAMPLE_XML_PATH_LOCAL;
    else {
        printf("Could not find '%s' nor '%s'. Aborting.\n" , SAMPLE_XML_PATH, SAMPLE_XML_PATH_LOCAL);
        return XN_STATUS_ERROR;
    }
    printf("Reading config from: '%s'\n", fn);

    nRetVal = g_Context.InitFromXmlFile(fn, g_scriptNode, &errors);
    if (nRetVal == XN_STATUS_NO_NODE_PRESENT)
    {
        XnChar strError[1024];
        errors.ToString(strError, 1024);
        printf("%s\n", strError);
        return (nRetVal);
    }
    else if (nRetVal != XN_STATUS_OK)
    {
        printf("Open failed: %s\n", xnGetStatusString(nRetVal));
        return (nRetVal);
    }

    nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_DepthGenerator);
    CHECK_RC(nRetVal,"No depth");

#if (XN_PLATFORM != XN_PLATFORM_MACOSX)	
    // we want our benchmark application to run on only one CPU core
    cpu_set_t mask;
	CPU_ZERO(&mask);
	CPU_SET(1,&mask);
	sched_setaffinity(0,sizeof(mask),&mask);
#endif	
	//initialize the FPS calculator
	nRetVal = xnFPSInit(&xnFPS, 90);
	CHECK_RC(nRetVal, "FPS Init");
	//ensure the User generator exists
    nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_USER, g_UserGenerator);
    if (nRetVal != XN_STATUS_OK)
    {
        nRetVal = g_UserGenerator.Create(g_Context);
        CHECK_RC(nRetVal, "Find user generator");
    }
    //register to generators callbacks
    XnCallbackHandle hUserCallbacks, hCalibrationStart, hCalibrationComplete, hPoseDetected;
    if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_SKELETON))
    {
        printf("Supplied user generator doesn't support skeleton\n");
        return 1;
    }
    nRetVal = g_UserGenerator.RegisterUserCallbacks(User_NewUser, User_LostUser, NULL, hUserCallbacks);
    CHECK_RC(nRetVal, "Register to user callbacks");
    nRetVal = g_UserGenerator.GetSkeletonCap().RegisterToCalibrationStart(UserCalibration_CalibrationStart, NULL, hCalibrationStart);
    CHECK_RC(nRetVal, "Register to calibration start");
    nRetVal = g_UserGenerator.GetSkeletonCap().RegisterToCalibrationComplete(UserCalibration_CalibrationComplete, NULL, hCalibrationComplete);
    CHECK_RC(nRetVal, "Register to calibration complete");
    //ensure calibration
    if (g_UserGenerator.GetSkeletonCap().NeedPoseForCalibration())
    {
        g_bNeedPose = TRUE;
        if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION))
        {
            printf("Pose required, but not supported\n");
            return 1;
        }
        nRetVal = g_UserGenerator.GetPoseDetectionCap().RegisterToPoseDetected(UserPose_PoseDetected, NULL, hPoseDetected);
        CHECK_RC(nRetVal, "Register to Pose Detected");
        g_UserGenerator.GetSkeletonCap().GetCalibrationPose(g_strPose);
    }
    //set skeleton profile (all joints)
    g_UserGenerator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);
    //start to generate
    nRetVal = g_Context.StartGeneratingAll();
    CHECK_RC(nRetVal, "StartGenerating");



    printf("%c[%d;%d;%dmPrimeSense Skeleton Benchmark Application\n ", 0x1B, BRIGHT,BLUE,BG_BLACK);
    printf("%c[%d;%d;%dmSet Maximal users to %d ", 0x1B, BRIGHT,BLUE,BG_BLACK,numOfUser);
	printf("%c[%dm\n", 0x1B, 0);
    printf("Starting to run\n");
    if(g_bNeedPose)
    {
        printf("Assume calibration pose\n");
    }

    XnUInt32 epochTime = 0;
    // every 30 frames (about 1 second) we sample the CPU resource usage
    while (!xnOSWasKeyboardHit())
    {
    	if (Sample)
    	{	//get the beginning sample of CPU resources
    		getrusage(RUSAGE_SELF, &ru);
    		timStart=ru.ru_utime;
    		t=(double)timStart.tv_sec * 1000000.0 + (double)timStart.tv_usec \
    		   + (double)ru.ru_stime.tv_sec*1000000.0+(double)ru.ru_stime.tv_usec;
    		//get the wall clock time
    		gettimeofday(&Wall,NULL);

    		w=(double)Wall.tv_sec * 1000000.0 + (double)Wall.tv_usec;
    		Sample = false;
    	}
    	g_Context.WaitOneUpdateAll(g_UserGenerator);
    	xnFPSMarkFrame(&xnFPS);
        // print the torso information for every tracked user once per second (every 30 frames) to avoid loading the CPU with printf
        if(TotalFrames % 30 == 0)
        {
			nUsers=MAX_NUM_USERS;
			g_UserGenerator.GetUsers(aUsers, nUsers);
			int numTracked=0;
			int userToPrint=-1;
			for(XnUInt16 i=0; i<nUsers; i++)
			{
				if(g_UserGenerator.GetSkeletonCap().IsTracking(aUsers[i])==FALSE)
					continue;

				g_UserGenerator.GetSkeletonCap().GetSkeletonJoint(aUsers[i],XN_SKEL_TORSO,torsoJoint);
				printf("User %d Located At distance of %6.2f mm from the sensor\n",aUsers[i],torsoJoint.position.position.Z);

			 }
			//get the finish sample of the CPU resources
			getrusage(RUSAGE_SELF, &ru);
			timEnd=ru.ru_utime;
			t = (double)timEnd.tv_sec * 1000000.0 + (double)timEnd.tv_usec \
				+	(double)ru.ru_stime.tv_sec*1000000.0+(double)ru.ru_stime.tv_usec	- t;
			//get the wall clock
			gettimeofday(&Wall,NULL);

			w = (double)Wall.tv_sec * 1000000.0 + (double)Wall.tv_usec - w;

			XnDouble fps=xnFPSCalc(&xnFPS);
			//print stuff.
			printf("%c[%d;%d;%dmCPU Utilization=%3.2f%%\t", 0x1B, BRIGHT,RED,BG_BLACK,(double)100*t/w);
			printf("%c[%d;%d;%dmFPS=%3.2f ", 0x1B, BRIGHT,RED,BG_BLACK,(double)fps);
			printf("%c[%dm\n", 0x1B, 0);
			Sample= true;

		}
        TotalFrames++;
        
    }
    g_scriptNode.Release();
    g_DepthGenerator.Release();
    g_UserGenerator.Release();
    g_Context.Release();

}
int main(int argc, char **argv) {
    ros::init(argc, argv, "tracker_2");
    ros::NodeHandle nh;

    string configFilename = ros::package::getPath("openni_tracker_multi") + "/openni_tracker.xml";
    XnStatus nRetVal = g_Context.InitFromXmlFile(configFilename.c_str());
    CHECK_RC(nRetVal, "InitFromXml");

//    XnStatus nRetVal = XN_STATUS_OK;

    //xnLogInitFromXmlFile(csXmlFile);

    nRetVal = g_Context.Init();
    XN_IS_STATUS_OK(nRetVal);


   // SELECTION OF THE DEVICE
    xn::EnumerationErrors errors;
    xn::Device g_Device;
        // find devices
    xn::NodeInfoList list;
    xn::NodeInfoList list_depth;
        nRetVal = g_Context.EnumerateProductionTrees(XN_NODE_TYPE_DEVICE, NULL, list, &errors);
    XN_IS_STATUS_OK(nRetVal);

    printf("The following devices were found:\n");
        int i = 1;
        for (xn::NodeInfoList::Iterator it = list.Begin(); it != list.End(); ++it, ++i)
        {
            xn::NodeInfo deviceNodeInfo = *it;

            xn::Device deviceNode;
            deviceNodeInfo.GetInstance(deviceNode);
            XnBool bExists = deviceNode.IsValid();
            if (!bExists)
            {
                g_Context.CreateProductionTree(deviceNodeInfo, deviceNode);
                // this might fail.
            }

            if (deviceNode.IsValid() && deviceNode.IsCapabilitySupported(XN_CAPABILITY_DEVICE_IDENTIFICATION))
            {
                const XnUInt32 nStringBufferSize = 200;
                XnChar strDeviceName[nStringBufferSize];
                XnChar strSerialNumber[nStringBufferSize];

                XnUInt32 nLength = nStringBufferSize;
                deviceNode.GetIdentificationCap().GetDeviceName(strDeviceName, nLength);
                nLength = nStringBufferSize;
                deviceNode.GetIdentificationCap().GetSerialNumber(strSerialNumber, nLength);
                printf("[%d] %s (%s)\n", i, strDeviceName, strSerialNumber);
            }
            else
            {
                printf("[%d] %s\n", i, deviceNodeInfo.GetCreationInfo());
            }

            // release the device if we created it
            if (!bExists && deviceNode.IsValid())
            {
                deviceNode.Release();
            }
        }
        printf("\n");
        printf("Choose device to open (1): ");

        int chosen=0;
        if (argc != 2)
        {
            printf("Choose device to open (1): ");
            int nRetval = scanf("%d", &chosen);
        }
        else
        {
        chosen = atoi(argv[1]);
        }

            // create it
            xn::NodeInfoList::Iterator it = list.Begin();
            for (i = 1; i < chosen; ++i)
            {
                it++;
            }

        xn::NodeInfo deviceNode = *it;
        nRetVal = g_Context.CreateProductionTree(deviceNode, g_Device);
        printf("Production tree of the device created.\n");

    // SELECTION OF THE DEPTH GENERATOR
        nRetVal = g_Context.EnumerateProductionTrees(XN_NODE_TYPE_DEPTH, NULL, list_depth, &errors);
        XN_IS_STATUS_OK(nRetVal);

        printf("The following devices were found:\n");
            int i_depth = 1;
            for (xn::NodeInfoList::Iterator it_depth = list_depth.Begin(); it_depth != list_depth.End(); ++it_depth, ++i_depth)
            {
                xn::NodeInfo depthNodeInfo = *it_depth;

                xn::Device depthNode;
                depthNodeInfo.GetInstance(depthNode);
                XnBool bExists_depth = depthNode.IsValid();
                if (!bExists_depth)
                {
                    g_Context.CreateProductionTree(depthNodeInfo, depthNode);
                    // this might fail.
                }

                if (depthNode.IsValid() && depthNode.IsCapabilitySupported(XN_CAPABILITY_DEVICE_IDENTIFICATION))
                {
                    const XnUInt32 nStringBufferSize = 200;
                    XnChar strDeviceName[nStringBufferSize];
                    XnChar strSerialNumber[nStringBufferSize];

                    XnUInt32 nLength = nStringBufferSize;
                    depthNode.GetIdentificationCap().GetDeviceName(strDeviceName, nLength);
                    nLength = nStringBufferSize;
                    depthNode.GetIdentificationCap().GetSerialNumber(strSerialNumber, nLength);
                    printf("[%d] %s (%s)\n", i, strDeviceName, strSerialNumber);
                }
                else
                {
                    printf("[%d] %s\n", i, depthNodeInfo.GetCreationInfo());
                }

                // release the device if we created it
                if (!bExists_depth && depthNode.IsValid())
                {
                    depthNode.Release();
                }
            }
            printf("\n");
        printf("Choose device to open (1): ");

        int chosen_depth = 1;
        /*if (argc != 2)
        {
            printf("Choose device to open (1): ");
            int nRetval_depth = scanf("%d", &chosen_depth);
        }
        else
        {
        chosen_depth = atoi(argv[1]);
        }*/


            // create it
            xn::NodeInfoList::Iterator it_depth = list_depth.Begin();
            for (i = 1; i < chosen_depth; ++i)
            {
                it_depth++;
            }

        xn::NodeInfo depthNode = *it_depth;
        nRetVal = g_Context.CreateProductionTree(depthNode, g_DepthGenerator);
        printf("Production tree of the DepthGenerator created.\n");

        nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_DepthGenerator);
        printf("Production tree of the depth generator created.\n");
        XN_IS_STATUS_OK(nRetVal);
        printf("XN_IS_STATUS_OK(nRetVal).\n");



    CHECK_RC(nRetVal, "Find depth generator");
     printf("CHECK_RC(nRetVal, Find depth generator);\n");

    nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_USER, g_UserGenerator);
    printf("User generator found.\n");
    if (nRetVal != XN_STATUS_OK) {
        nRetVal = g_UserGenerator.Create(g_Context);
        printf("User generator created.\n");
        CHECK_RC(nRetVal, "Find user generator");
    }

    if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_SKELETON)) {
        printf("Supplied user generator doesn't support skeleton.\n");
        ROS_INFO("Supplied user generator doesn't support skeleton");
        return 1;
    }

    XnCallbackHandle hUserCallbacks;
    g_UserGenerator.RegisterUserCallbacks(User_NewUser, User_LostUser, NULL, hUserCallbacks);

    XnCallbackHandle hCalibrationCallbacks;
    g_UserGenerator.GetSkeletonCap().RegisterCalibrationCallbacks(UserCalibration_CalibrationStart, UserCalibration_CalibrationEnd, NULL, hCalibrationCallbacks);

    if (g_UserGenerator.GetSkeletonCap().NeedPoseForCalibration()) {
        g_bNeedPose = TRUE;
        if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION)) {
            ROS_INFO("Pose required, but not supported");
            return 1;
        }

        XnCallbackHandle hPoseCallbacks;
        g_UserGenerator.GetPoseDetectionCap().RegisterToPoseCallbacks(UserPose_PoseDetected, NULL, NULL, hPoseCallbacks);

        g_UserGenerator.GetSkeletonCap().GetCalibrationPose(g_strPose);
    }

    g_UserGenerator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);

    nRetVal = g_Context.StartGeneratingAll();
    CHECK_RC(nRetVal, "StartGenerating");

    ros::Rate r(30);


        ros::NodeHandle pnh("~");
        string frame_id("/kinect2_depth_frame");
        pnh.getParam("camera_frame_id", frame_id);

    while (ros::ok()) {
        g_Context.WaitAndUpdateAll();
        publishTransforms(frame_id);
        r.sleep();
    }

    g_Context.Shutdown();
    return 0;
}
Example #22
int main(int argc, char **argv)
{
    XnStatus nRetVal = XN_STATUS_OK;
    xn::EnumerationErrors errors;
    
    if( USE_RECORED_DATA ){
        g_Context.Init();
        g_Context.OpenFileRecording(RECORD_FILE_PATH);
        xn::Player player;
        
        // Get the Player node
        nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_PLAYER, player);
        CHECK_RC(nRetVal, "Find player");
        
        LOG_D("PlaybackSpeed: %d", player.GetPlaybackSpeed());
        
        xn::NodeInfoList nodeList;
        player.EnumerateNodes(nodeList);
        for( xn::NodeInfoList::Iterator it = nodeList.Begin();
            it != nodeList.End(); ++it){
            
            if( (*it).GetDescription().Type == XN_NODE_TYPE_IMAGE ){
                nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_IMAGE, g_ImageGenerator);
                CHECK_RC(nRetVal, "Find image node");
                LOG_D("%s", "ImageGenerator created.");
            }
            else if( (*it).GetDescription().Type == XN_NODE_TYPE_DEPTH ){
                nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_DepthGenerator);
                CHECK_RC(nRetVal, "Find depth node");
                LOG_D("%s", "DepthGenerator created.");            
            }
            else{
                LOG_D("%s %s %s", ::xnProductionNodeTypeToString((*it).GetDescription().Type ),
                      (*it).GetInstanceName(),
                      (*it).GetDescription().strName);
            }
        }
    }
    else{
        LOG_I("Reading config from: '%s'", CONFIG_XML_PATH);
        
        nRetVal = g_Context.InitFromXmlFile(CONFIG_XML_PATH, g_scriptNode, &errors);
        if (nRetVal == XN_STATUS_NO_NODE_PRESENT){
            XnChar strError[1024];
            errors.ToString(strError, 1024);
            LOG_E("%s\n", strError);
            return (nRetVal);
        }
        else if (nRetVal != XN_STATUS_OK){
            LOG_E("Open failed: %s", xnGetStatusString(nRetVal));
            return (nRetVal);
        }
        
        nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_DepthGenerator);
        CHECK_RC(nRetVal,"No depth");
        
        // Get the ImageGenerator
        nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_IMAGE, g_ImageGenerator);
        CHECK_RC(nRetVal, "Find image generator");
        
    }
    // Get the UserGenerator
    nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_USER, g_UserGenerator);
    if(nRetVal!=XN_STATUS_OK){
        nRetVal = g_UserGenerator.Create(g_Context); 
        CHECK_RC(nRetVal, "Create user generator");
    }

    
    XnCallbackHandle hUserCallbacks, hCalibrationStart, hCalibrationComplete, hPoseDetected;
    if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_SKELETON)){
        LOG_E("%s", "Supplied user generator doesn't support skeleton");
        return 1;
    }
    nRetVal = g_UserGenerator.RegisterUserCallbacks(User_NewUser, User_LostUser, NULL, hUserCallbacks);
    CHECK_RC(nRetVal, "Register to user callbacks");

    g_SkeletonCap = g_UserGenerator.GetSkeletonCap();
    nRetVal = g_SkeletonCap.RegisterToCalibrationStart(UserCalibration_CalibrationStart, NULL, hCalibrationStart);
    CHECK_RC(nRetVal, "Register to calibration start");

    nRetVal = g_SkeletonCap.RegisterToCalibrationComplete(UserCalibration_CalibrationComplete, NULL, hCalibrationComplete);
    CHECK_RC(nRetVal, "Register to calibration complete");
    
    if (g_SkeletonCap.NeedPoseForCalibration()){
        g_bNeedPose = TRUE;
        if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION)){
            LOG_E("%s", "Pose required, but not supported");
            return 1;
        }
        nRetVal = g_UserGenerator.GetPoseDetectionCap().RegisterToPoseDetected(UserPose_PoseDetected, NULL, hPoseDetected);
        CHECK_RC(nRetVal, "Register to Pose Detected");
        g_SkeletonCap.GetCalibrationPose(g_strPose);
    }
    
    g_SkeletonCap.SetSkeletonProfile(XN_SKEL_PROFILE_ALL);
    
    nRetVal = g_Context.StartGeneratingAll();
    CHECK_RC(nRetVal, "StartGenerating");
    
    // Create the image buffer used for display
    XnMapOutputMode mapMode;
    g_ImageGenerator.GetMapOutputMode(mapMode);
    g_rgbImage = cvCreateImage(cvSize(mapMode.nXRes, mapMode.nYRes), IPL_DEPTH_8U, 3);

    LOG_I("%s", "Starting to run");
    if(g_bNeedPose){
        LOG_I("%s", "Assume calibration pose");
    }

    xn::Recorder recorder;
    if( DO_RECORED && !USE_RECORED_DATA ){
        // Create the recorder
        LOG_I("%s", "Setup Recorder");
        nRetVal = recorder.Create(g_Context);
        CHECK_RC(nRetVal, "Create recorder");
        
        // Configure the output destination
        nRetVal = recorder.SetDestination(XN_RECORD_MEDIUM_FILE, RECORD_FILE_PATH);
        CHECK_RC(nRetVal, "Set recorder destination file");
        
        // Add the depth and camera streams to the recording
        nRetVal = recorder.AddNodeToRecording(g_DepthGenerator, XN_CODEC_NULL);
        CHECK_RC(nRetVal, "Add depth node to recording");
        nRetVal = recorder.AddNodeToRecording(g_ImageGenerator, XN_CODEC_NULL);
        CHECK_RC(nRetVal, "Add image node to recording");
        
        LOG_I("%s", "Recorder setup done.");
    }

    while (!xnOSWasKeyboardHit())
    {
        g_Context.WaitOneUpdateAll(g_UserGenerator);
        if( DO_RECORED  && !USE_RECORED_DATA ){
            nRetVal = recorder.Record();
            CHECK_RC(nRetVal, "Record");
        }

        // Get the raw camera image data
        xn::ImageMetaData imageMetaData;
        g_ImageGenerator.GetMetaData(imageMetaData);
        // Copy the frame into the image buffer
        xnOSMemCopy(g_rgbImage->imageData, imageMetaData.RGB24Data(), g_rgbImage->imageSize);
        // Convert from RGB to BGR for display
        cvCvtColor(g_rgbImage, g_rgbImage, CV_RGB2BGR);

        // Get the user-label pixels from the UserGenerator
        xn::SceneMetaData sceneMetaData;
        g_UserGenerator.GetUserPixels(0, sceneMetaData);
        
        XnUserID allUsers[MAX_NUM_USERS];
        XnUInt16 nUsers = MAX_NUM_USERS;
        g_UserGenerator.GetUsers(allUsers, nUsers);
        for (int i = 0; i < nUsers; i++) {
            
            // Check whether calibration succeeded and the user is being tracked
            if (g_SkeletonCap.IsTracking(allUsers[i])) {
                // Draw the skeleton
                DrawSkelton(allUsers[i], i);
            }
        }
        
        // Display
        cvShowImage("User View", g_rgbImage);

        // Exit when ESC is pressed
        if (cvWaitKey(10) == 27) {
            break;
        }
    }

    if( !USE_RECORED_DATA ){
        g_scriptNode.Release();
    }
    g_DepthGenerator.Release();
    g_UserGenerator.Release();
    g_Context.Release();

	if (g_rgbImage != NULL) {
		cvReleaseImage(&g_rgbImage);	
	}
	g_Context.Shutdown();

    
}
Example #23
int main(int argc, char **argv)
{
	XnStatus rc = XN_STATUS_OK;
	xn::EnumerationErrors errors;

	rc = g_Context.InitFromXmlFile(SAMPLE_XML_PATH);
	CHECK_ERRORS(rc, errors, "InitFromXmlFile");
	CHECK_RC(rc, "InitFromXml");

	rc = g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_DepthGenerator);
	CHECK_RC(rc, "Find depth generator");
	rc = g_Context.FindExistingNode(XN_NODE_TYPE_USER, g_UserGenerator);
	CHECK_RC(rc, "Find user generator");

	if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_SKELETON) ||
		!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION))
	{
		printf("User generator doesn't support either skeleton or pose detection.\n");
		return XN_STATUS_ERROR;
	}

	g_UserGenerator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);

	rc = g_Context.StartGeneratingAll();
	CHECK_RC(rc, "StartGenerating");

	XnCallbackHandle hUserCBs, hCalibrationCBs, hPoseCBs;
	g_UserGenerator.RegisterUserCallbacks(NewUser, LostUser, NULL, hUserCBs);
	g_UserGenerator.GetSkeletonCap().RegisterCalibrationCallbacks(CalibrationStarted, CalibrationEnded, NULL, hCalibrationCBs);
	g_UserGenerator.GetPoseDetectionCap().RegisterToPoseCallbacks(PoseDetected, NULL, NULL, hPoseCBs);


	#ifdef USE_GLUT

	glInit(&argc, argv);
	glutMainLoop();

	#else

	if (!opengles_init(GL_WIN_SIZE_X, GL_WIN_SIZE_Y, &display, &surface, &context))
	{
		printf("Error initing opengles\n");
		CleanupExit();
	}

	glDisable(GL_DEPTH_TEST);
//	glEnable(GL_TEXTURE_2D);
	glEnableClientState(GL_VERTEX_ARRAY);
	glDisableClientState(GL_COLOR_ARRAY);

	while ((!_kbhit()) && (!g_bQuit))
	{
		glutDisplay();
		eglSwapBuffers(display, surface);
	}

	opengles_shutdown(display, surface, context);

	CleanupExit();

	#endif
}
int main()
{
    XnStatus nRetVal = XN_STATUS_OK;
    xn::EnumerationErrors errors;

    const char *fn = NULL;
    if    (fileExists(SAMPLE_XML_PATH)) fn = SAMPLE_XML_PATH;
    else if (fileExists(SAMPLE_XML_PATH_LOCAL)) fn = SAMPLE_XML_PATH_LOCAL;
    else {
        printf("Could not find '%s' nor '%s'. Aborting.\n" , SAMPLE_XML_PATH, SAMPLE_XML_PATH_LOCAL);
        return XN_STATUS_ERROR;
    }
    printf("Reading config from: '%s'\n", fn);

    nRetVal = g_Context.InitFromXmlFile(fn, g_scriptNode, &errors);
    if (nRetVal == XN_STATUS_NO_NODE_PRESENT)
    {
        XnChar strError[1024];
        errors.ToString(strError, 1024);
        printf("%s\n", strError);
        return (nRetVal);
    }
    else if (nRetVal != XN_STATUS_OK)
    {
        printf("Open failed: %s\n", xnGetStatusString(nRetVal));
        return (nRetVal);
    }

    nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_USER, g_UserGenerator);
    if (nRetVal != XN_STATUS_OK)
    {
        nRetVal = g_UserGenerator.Create(g_Context);
        CHECK_RC(nRetVal, "Find user generator");
    }

    XnCallbackHandle hUserCallbacks, hCalibrationStart, hCalibrationComplete, hPoseDetected;
    if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_SKELETON))
    {
        printf("Supplied user generator doesn't support skeleton\n");
        return 1;
    }
    nRetVal = g_UserGenerator.RegisterUserCallbacks(User_NewUser, User_LostUser, NULL, hUserCallbacks);
    CHECK_RC(nRetVal, "Register to user callbacks");
    nRetVal = g_UserGenerator.GetSkeletonCap().RegisterToCalibrationStart(UserCalibration_CalibrationStart, NULL, hCalibrationStart);
    CHECK_RC(nRetVal, "Register to calibration start");
    nRetVal = g_UserGenerator.GetSkeletonCap().RegisterToCalibrationComplete(UserCalibration_CalibrationComplete, NULL, hCalibrationComplete);
    CHECK_RC(nRetVal, "Register to calibration complete");

    if (g_UserGenerator.GetSkeletonCap().NeedPoseForCalibration())
    {
        g_bNeedPose = TRUE;
        if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION))
        {
            printf("Pose required, but not supported\n");
            return 1;
        }
        nRetVal = g_UserGenerator.GetPoseDetectionCap().RegisterToPoseDetected(UserPose_PoseDetected, NULL, hPoseDetected);
        CHECK_RC(nRetVal, "Register to Pose Detected");
        g_UserGenerator.GetSkeletonCap().GetCalibrationPose(g_strPose);
    }

    g_UserGenerator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);

    nRetVal = g_Context.StartGeneratingAll();
    CHECK_RC(nRetVal, "StartGenerating");

    XnUserID aUsers[MAX_NUM_USERS];
    XnUInt16 nUsers;
    XnSkeletonJointTransformation torsoJoint;

    printf("Starting to run\n");
    if(g_bNeedPose)
    {
        printf("Assume calibration pose\n");
    }

	while (!xnOSWasKeyboardHit())
    {
        g_Context.WaitOneUpdateAll(g_UserGenerator);
        // print the torso information for the first user already tracking
        nUsers=MAX_NUM_USERS;
        g_UserGenerator.GetUsers(aUsers, nUsers);
        for(XnUInt16 i=0; i<nUsers; i++)
        {
            if(g_UserGenerator.GetSkeletonCap().IsTracking(aUsers[i])==FALSE)
                continue;

            g_UserGenerator.GetSkeletonCap().GetSkeletonJoint(aUsers[i],XN_SKEL_TORSO,torsoJoint);
                printf("user %d: head at (%6.2f,%6.2f,%6.2f)\n",aUsers[i],
                                                                torsoJoint.position.position.X,
                                                                torsoJoint.position.position.Y,
                                                                torsoJoint.position.position.Z);
        }
        
    }
    g_scriptNode.Release();
    g_UserGenerator.Release();
    g_Context.Release();

}
Example #25
int main(int argc, char **argv)
{
	sender.setup(HOST, PORT);
	
	XnStatus rc = XN_STATUS_OK;

	rc = g_Context.InitFromXmlFile(SAMPLE_XML_PATH);
	CHECK_RC(rc, "InitFromXml");

	rc = g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_DepthGenerator);
	CHECK_RC(rc, "Find depth generator");
	rc = g_Context.FindExistingNode(XN_NODE_TYPE_USER, g_UserGenerator);
	CHECK_RC(rc, "Find user generator");

	XnCallbackHandle h;

	g_UserGenerator.RegisterUserCallbacks(NewUser, LostUser, NULL, h);
	g_UserGenerator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);

	XnCallbackHandle hCalib;
	XnCallbackHandle hPose;
	g_UserGenerator.GetSkeletonCap().RegisterCalibrationCallbacks(&CalibrationStart, &CalibrationEnd, NULL, hCalib);
	g_UserGenerator.GetPoseDetectionCap().RegisterToPoseCallbacks(&PoseDetected, NULL, NULL, hPose);

	rc = g_Context.StartGeneratingAll();
	CHECK_RC(rc, "StartGenerating");

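	// Query the depth map resolution and maximum depth to initialize the pointer and cut-off range.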
	xn::DepthMetaData depthMD;
	g_DepthGenerator.GetMetaData(depthMD);

	fXRes = depthMD.XRes();
	fYRes = depthMD.YRes();
	fMaxDepth = depthMD.ZRes();
	
	nCutOffMin = 0;
	nCutOffMax = fMaxDepth;

	nPointerX = fXRes / 2;
	nPointerY = fYRes / 2;
	nPointerDiffX = (WIN_SIZE_X / fXRes / 2) + 1;
	nPointerDiffY = (WIN_SIZE_Y / fYRes / 2) + 1;

	if (argc == 2)
	{
		nCutOffMax = atol(argv[1]);
	}

	srand(XnUInt32(time(NULL)));

	glutInit(&argc, argv);
	glutInitDisplayString("stencil depth>16 double rgb samples=0");
    glutInitWindowSize(WIN_SIZE_X, WIN_SIZE_Y);
	glutCreateWindow("Prime Sense Stick Figure Sample");
	glutSetCursor(GLUT_CURSOR_NONE);

	init_opengl();

	glut_helpers_initialize();

	cb.passive_motion_function = MotionCallback;
	cb.keyboard_function = key;

	camera.configure_buttons(0);
	camera.set_camera_mode(true);
	camera.set_parent_rotation( & camera.trackball.r);
	camera.enable();

	object.configure_buttons(1);
	object.translator.t[2] = -1;
	object.translator.scale *= .1f;
	object.trackball.r = rotationf(vec3f(2.0,0.01,0.01), to_radians(180));
	object.set_parent_rotation( & camera.trackball.r);
	object.disable();

	light.configure_buttons(0);
	light.translator.t = vec3f(.5, .5, -1);
	light.set_parent_rotation( & camera.trackball.r);
	light.disable();

	reshaper.zNear = 1;
	reshaper.zFar = 100;

    // make sure all interactors get glut events
	glut_add_interactor(&cb);
	glut_add_interactor(&camera);
	glut_add_interactor(&reshaper);
	glut_add_interactor(&light);
	glut_add_interactor(&object);

	camera.translator.t = vec3f(0, 0, 0);
	camera.trackball.r = rotationf(vec3f(0, 0, 0), to_radians(0));

	light.translator.t = vec3f (0, 1.13, -2.41);
	light.trackball.r = rotationf(vec3f(0.6038, -0.1955, -0.4391), to_radians(102));

	glutIdleFunc(idle);
	glutDisplayFunc(display);
	// Per frame code is in display
	glutMainLoop();


	return (0);
}
int main(int argc, char **argv)
{
	XnStatus nRetVal = XN_STATUS_OK;

	if (argc > 1)
	{
		nRetVal = g_Context.Init();
		CHECK_RC(nRetVal, "Init");
		nRetVal = g_Context.OpenFileRecording(argv[1], g_Player);
		if (nRetVal != XN_STATUS_OK)
		{
			printf("Can't open recording %s: %s\n", argv[1], xnGetStatusString(nRetVal));
			return 1;
		}
	}
	else
	{
		xn::EnumerationErrors errors;
		nRetVal = g_Context.InitFromXmlFile(SAMPLE_XML_PATH, g_scriptNode, &errors);
		if (nRetVal == XN_STATUS_NO_NODE_PRESENT)
		{
			XnChar strError[1024];
			errors.ToString(strError, 1024);
			printf("%s\n", strError);
			return (nRetVal);
		}
		else if (nRetVal != XN_STATUS_OK)
		{
			printf("Open failed: %s\n", xnGetStatusString(nRetVal));
			return (nRetVal);
		}
	}

	nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_DepthGenerator);
	CHECK_RC(nRetVal, "Find depth generator");
	nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_USER, g_UserGenerator);
	if (nRetVal != XN_STATUS_OK)
	{
		nRetVal = g_UserGenerator.Create(g_Context);
		CHECK_RC(nRetVal, "Find user generator");
	}

	XnCallbackHandle hUserCallbacks, hCalibrationStart, hCalibrationComplete, hPoseDetected, hCalibrationInProgress, hPoseInProgress;
	if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_SKELETON))
	{
		printf("Supplied user generator doesn't support skeleton\n");
		return 1;
	}
	nRetVal = g_UserGenerator.RegisterUserCallbacks(User_NewUser, User_LostUser, NULL, hUserCallbacks);
	CHECK_RC(nRetVal, "Register to user callbacks");
	nRetVal = g_UserGenerator.GetSkeletonCap().RegisterToCalibrationStart(UserCalibration_CalibrationStart, NULL, hCalibrationStart);
	CHECK_RC(nRetVal, "Register to calibration start");
	nRetVal = g_UserGenerator.GetSkeletonCap().RegisterToCalibrationComplete(UserCalibration_CalibrationComplete, NULL, hCalibrationComplete);
	CHECK_RC(nRetVal, "Register to calibration complete");

	if (g_UserGenerator.GetSkeletonCap().NeedPoseForCalibration())
	{
		g_bNeedPose = TRUE;
		if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION))
		{
			printf("Pose required, but not supported\n");
			return 1;
		}
		nRetVal = g_UserGenerator.GetPoseDetectionCap().RegisterToPoseDetected(UserPose_PoseDetected, NULL, hPoseDetected);
		CHECK_RC(nRetVal, "Register to Pose Detected");
		g_UserGenerator.GetSkeletonCap().GetCalibrationPose(g_strPose);
	}

	g_UserGenerator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);

	nRetVal = g_UserGenerator.GetSkeletonCap().RegisterToCalibrationInProgress(MyCalibrationInProgress, NULL, hCalibrationInProgress);
	CHECK_RC(nRetVal, "Register to calibration in progress");

	nRetVal = g_UserGenerator.GetPoseDetectionCap().RegisterToPoseInProgress(MyPoseInProgress, NULL, hPoseInProgress);
	CHECK_RC(nRetVal, "Register to pose in progress");

	nRetVal = g_Context.StartGeneratingAll();
	CHECK_RC(nRetVal, "StartGenerating");

#ifndef USE_GLES
	glInit(&argc, argv);
	glutMainLoop();
#else
	if (!opengles_init(GL_WIN_SIZE_X, GL_WIN_SIZE_Y, &display, &surface, &context))
	{
		printf("Error initializing opengles\n");
		CleanupExit();
	}

	glDisable(GL_DEPTH_TEST);
	glEnable(GL_TEXTURE_2D);
	glEnableClientState(GL_VERTEX_ARRAY);
	glDisableClientState(GL_COLOR_ARRAY);

	while (!g_bQuit)
	{
		glutDisplay();
		eglSwapBuffers(display, surface);
	}
	opengles_shutdown(display, surface, context);

	CleanupExit();
#endif
}
void KinectDataPub::spin(bool async)
{
  static bool onceThrough = false;

  if (!onceThrough){
    XnStatus nRetVal = XN_STATUS_OK;

    nRetVal = context.StartGeneratingAll();
    CHECK_RC(nRetVal, "StartGenerating");
    onceThrough = true;

    printf("Starting to run\n");
    if (needPose){
      printf("Assume calibration pose\n");
    }
  }

  XnUserID aUsers[MAX_NUM_USERS];
  XnUInt16 nUsers;
  SkeletonInfo skels[MAX_NUM_USERS];
  SensorMsg msg;
  xn::DepthMetaData depthMD;
  xn::SceneMetaData sceneMD;

  while(!xnOSWasKeyboardHit()){
    context.WaitOneUpdateAll(userGen);

    /**
     * nUsers is used both as an input and an output parameter.
     * As input it gives the size of the user ID array; as output it returns
     * the number of users recognized. It therefore needs to be re-initialized
     * on every loop iteration.
     */
    nUsers = MAX_NUM_USERS;
    userGen.GetUsers(aUsers, nUsers);
    memset(&msg, 0, sizeof(SensorMsg));

    for (XnUInt16 i = 0; i < nUsers; ++i){
      if (userGen.GetSkeletonCap().IsTracking(aUsers[i])){
        // get the user's skeleton
        skels[i].user = aUsers[i];
        getSkeleton(skels[i]);
      }
      else
        skels[i].user = 0; // user is not tracked
    }

    depthGen.GetMetaData(depthMD); // depth map
    userGen.GetUserPixels(0, sceneMD); // labels

    msg.nUsers = nUsers; // num of users detected, either tracked or not
    msg.pSkels = skels;
    msg.pDepthMD = &depthMD;
    msg.pSceneMD = &sceneMD;

    for (int i = 0; i < subscribers.size(); ++i)
      subscribers[i]->notify(RAD_SENSOR_MSG, &msg);

    if (async)
      return;
  }
}
int main(int argc, char **argv)
{
	XnStatus nRetVal = XN_STATUS_OK;

	memset(COM_tracker,0,15*100*sizeof(float));
	if (argc > 1)
	{
		nRetVal = g_Context.Init();
		CHECK_RC(nRetVal, "Init");
		nRetVal = g_Context.OpenFileRecording(argv[1], g_Player);
		if (nRetVal != XN_STATUS_OK)
		{
			printf("Can't open recording %s: %s\n", argv[1], xnGetStatusString(nRetVal));
			return 1;
		}
	}
	else
	{
		xn::EnumerationErrors errors;
		nRetVal = g_Context.InitFromXmlFile(SAMPLE_XML_PATH, g_scriptNode, &errors);
		if (nRetVal == XN_STATUS_NO_NODE_PRESENT)
		{
			XnChar strError[1024];
			errors.ToString(strError, 1024);
			printf("%s\n", strError);
			return (nRetVal);
		}
		else if (nRetVal != XN_STATUS_OK)
		{
			printf("Open failed: %s\n", xnGetStatusString(nRetVal));
			return (nRetVal);
		}
	}

	nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_DepthGenerator);
	if (nRetVal != XN_STATUS_OK)
	{
		printf("No depth generator found. Using a default one...");
		xn::MockDepthGenerator mockDepth;
		nRetVal = mockDepth.Create(g_Context);
		CHECK_RC(nRetVal, "Create mock depth");

		// set some defaults
		XnMapOutputMode defaultMode;
		defaultMode.nXRes = 320;
		defaultMode.nYRes = 240;
		defaultMode.nFPS = 30;
		nRetVal = mockDepth.SetMapOutputMode(defaultMode);
		CHECK_RC(nRetVal, "set default mode");

		// set FOV
		XnFieldOfView fov;
		fov.fHFOV = 1.0225999419141749;
		fov.fVFOV = 0.79661567681716894;
		nRetVal = mockDepth.SetGeneralProperty(XN_PROP_FIELD_OF_VIEW, sizeof(fov), &fov);
		CHECK_RC(nRetVal, "set FOV");

		XnUInt32 nDataSize = defaultMode.nXRes * defaultMode.nYRes * sizeof(XnDepthPixel);
		XnDepthPixel* pData = (XnDepthPixel*)xnOSCallocAligned(nDataSize, 1, XN_DEFAULT_MEM_ALIGN);

		nRetVal = mockDepth.SetData(1, 0, nDataSize, pData);
		CHECK_RC(nRetVal, "set empty depth map");

		g_DepthGenerator = mockDepth;
	}

	nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_IMAGE, g_ImageGenerator);
	if (nRetVal != XN_STATUS_OK)
	{
		printf("No image node exists! check your XML.");
		return 1;
	}

	nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_USER, g_UserGenerator);
	if (nRetVal != XN_STATUS_OK)
	{
		nRetVal = g_UserGenerator.Create(g_Context);
		CHECK_RC(nRetVal, "Find user generator");
	}

	XnCallbackHandle hUserCallbacks;
	if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_SKELETON))
	{
		printf("Supplied user generator doesn't support skeleton\n");
		return 1;
	}
	nRetVal = g_UserGenerator.RegisterUserCallbacks(User_NewUser, User_LostUser, NULL, hUserCallbacks);
	CHECK_RC(nRetVal, "Register to user callbacks");

	nRetVal = g_Context.StartGeneratingAll();
	CHECK_RC(nRetVal, "StartGenerating");

#ifndef USE_GLES
	glInit(&argc, argv);
	glutMainLoop();
#else
	if (!opengles_init(GL_WIN_SIZE_X, GL_WIN_SIZE_Y, &display, &surface, &context))
	{
		printf("Error initializing opengles\n");
		CleanupExit();
	}

	glDisable(GL_DEPTH_TEST);
	glEnable(GL_TEXTURE_2D);
	glEnableClientState(GL_VERTEX_ARRAY);
	glDisableClientState(GL_COLOR_ARRAY);

	while (!g_bQuit)
	{
		glutDisplay();
		eglSwapBuffers(display, surface);
	}
	opengles_shutdown(display, surface, context);

	CleanupExit();
#endif
}
int main(int argc, char **argv) {
    ros::init(argc, argv, "openni_hand_tracker");
    ros::NodeHandle nh;

    string configFilename = ros::package::getPath("openni_tracker") + "/openni_tracker.xml";
    XnStatus nRetVal = g_Context.InitFromXmlFile(configFilename.c_str());
    CHECK_RC(nRetVal, "InitFromXml");

    nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_DepthGenerator);
    CHECK_RC(nRetVal, "Find depth generator");
	
    // Create generators
    nRetVal = g_GestureGenerator.Create(g_Context);
    CHECK_RC(nRetVal, "Unable to create GestureGenerator.");

    nRetVal = g_HandsGenerator.Create(g_Context);
    CHECK_RC(nRetVal, "Unable to create HandsGenerator.");
    
    ROS_INFO("Create Generator Success");

/*

	nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_USER, g_UserGenerator);
	if (nRetVal != XN_STATUS_OK) {
		nRetVal = g_UserGenerator.Create(g_Context);
		CHECK_RC(nRetVal, "Find user generator");
	}

	if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_SKELETON)) {
		ROS_INFO("Supplied user generator doesn't support skeleton");
		return 1;
	}

    XnCallbackHandle hUserCallbacks;
	g_UserGenerator.RegisterUserCallbacks(User_NewUser, User_LostUser, NULL, hUserCallbacks);

	XnCallbackHandle hCalibrationCallbacks;
	g_UserGenerator.GetSkeletonCap().RegisterCalibrationCallbacks(UserCalibration_CalibrationStart, UserCalibration_CalibrationEnd, NULL, hCalibrationCallbacks);

	if (g_UserGenerator.GetSkeletonCap().NeedPoseForCalibration()) {
		g_bNeedPose = TRUE;
		if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION)) {
			ROS_INFO("Pose required, but not supported");
			return 1;
		}

		XnCallbackHandle hPoseCallbacks;
		g_UserGenerator.GetPoseDetectionCap().RegisterToPoseCallbacks(UserPose_PoseDetected, NULL, NULL, hPoseCallbacks);

		g_UserGenerator.GetSkeletonCap().GetCalibrationPose(g_strPose);
	}

	g_UserGenerator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);
*/

	nRetVal = g_Context.StartGeneratingAll();
	CHECK_RC(nRetVal, "StartGenerating");

	ros::Rate r(30);

	ros::NodeHandle pnh("~");
	string frame_id("openni_depth_frame");
	pnh.getParam("camera_frame_id", frame_id);

	while (ros::ok()) {
		g_Context.WaitAndUpdateAll();
		r.sleep();
	}

	g_Context.Shutdown();
	return 0;
}
int main(int argc, char **argv) {
	nRetVal = XN_STATUS_OK;

	/* Initialize the context (camera data) */
	nRetVal = context.Init();
	checkError("Fehler beim Initialisieren des Context", nRetVal)?0:exit(-1);



	/* Create the depth generator */
	nRetVal = depth.Create(context);
	checkError("Fehler beim Erstellen des Tiefengenerators", nRetVal)?0:exit(-1);

	/* Configure the depth generator */
	XnMapOutputMode outputModeDepth;
	outputModeDepth.nXRes = 640;
	outputModeDepth.nYRes = 480;
	outputModeDepth.nFPS = 30;
	nRetVal = depth.SetMapOutputMode(outputModeDepth);
	checkError("Fehler beim Konfigurieren des Tiefengenerators", nRetVal)?0:exit(-1);


	/* Create the image generator */
	nRetVal = image.Create(context);
	checkError("Fehler beim Erstellen des Bildgenerators", nRetVal)?0:exit(-1);

	/* Configure the image generator */
	XnMapOutputMode outputModeImage;
	outputModeImage.nXRes = 640;
	outputModeImage.nYRes = 480;
	outputModeImage.nFPS = 30;
	nRetVal = image.SetMapOutputMode(outputModeImage);
	checkError("Fehler beim Konfigurieren des Bildgenerators", nRetVal)?0:exit(-1);	

	/* Start the generators - full speed ahead! */
	nRetVal = context.StartGeneratingAll();
	checkError("Error starting the generators", nRetVal)?0:exit(-1);

	/* Initialize GLUT */
	glutInit(&argc, argv);
	glutInitDisplayMode(GLUT_RGBA | GLUT_DOUBLE);
	glutInitWindowSize(WINDOW_SIZE_X, WINDOW_SIZE_Y);
	glutInitWindowPosition(300,150);
	win = glutCreateWindow("kinect-head-tracking");
	glClearColor(0, 0, 0, 0.0); // Background color
	glEnable(GL_DEPTH_TEST);          // Enable depth testing
	glDepthFunc(GL_LEQUAL);
//	glEnable(GL_CULL_FACE);           // Enable backface culling
//	glEnable(GL_ALPHA_TEST);
//	glAlphaFunc(GL_GEQUAL, 1);

	/* Textures */
	glGenTextures(1, &texture_rgb);
	glBindTexture(GL_TEXTURE_2D, texture_rgb);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP);


	glGenTextures(1, &texture_depth);
	glBindTexture(GL_TEXTURE_2D, texture_depth);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP);

	glutDisplayFunc(glut_display);
	glutIdleFunc(glut_idle);
	glutMouseFunc(glut_mouse);
	glutMotionFunc(glut_mouse_motion);
	glutKeyboardFunc(glut_keyboard);
	glutMainLoop();
	return 0;
}