Code example #1
BP::list GestureGenerator_GetAvailableGestures(xn::GestureGenerator& self) {
    checkValid(self);

    XnUInt16 gestures = self.GetNumberOfAvailableGestures();
    BP::list ret;

    if (gestures > 0) {
        const XnUInt16 gestureNameBufferLength = 100; // assumed to be large enough for any gesture name

        XnChar** buf = new XnChar*[gestures];
        for (XnUInt16 i = 0; i < gestures; i++)
            buf[i] = new XnChar[gestureNameBufferLength];

        check( self.EnumerateGestures(*buf, gestures) );

        for (XnUInt16 i = 0; i < gestures; i++)
            if (buf[i])
                ret.append(std::string(buf[i]));

        for (XnUInt16 i = 0; i < gestures; i++)
            delete[] buf[i];
        delete[] buf;
    }
    return ret;
}
Code example #2
File: OSCeleton.cpp Project: YggdrasiI/OSCeleton
void XN_CALLBACK_TYPE lost_hand(xn::HandsGenerator &generator, XnUserID nId, XnFloat fTime, void *pCookie) {
	printf("Lost Hand %d               \n", nId);
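    // Re-register the focus gesture so a new hand can be detected after this one is lost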
    gestureGenerator.AddGesture(GESTURE_TO_USE, NULL);

	if (kitchenMode) return;

	lo_send(addr, "/lost_user", NULL);
}
Code example #3
File: main.cpp Project: cphoward/AquaKinect
void CleanupExit()
{
	g_ScriptNode.Release();
	g_DepthGenerator.Release();
	g_GestureGenerator.Release();
	g_Context.Release();

	exit (1);
}
Code example #4
File: main.cpp Project: jakubsieradzki/BeMyGest
void XN_CALLBACK_TYPE Hand_Destroy(
    xn::HandsGenerator& generator,
    XnUserID nId,
    XnFloat fTime,
    void* pCookie)
{
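  // The tracked hand disappeared: re-arm gesture detection and clear the recognition flag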
  gesture_generator.AddGesture(GESTURE, NULL);
  hand_recognized = false;
}
Code example #5
File: main.cpp Project: jakubsieradzki/BeMyGest
// Define hand & gesture recognition callbacks
void XN_CALLBACK_TYPE Gesture_Recognized(
    xn::GestureGenerator& generator,
    const XnChar* strGesture,
    const XnPoint3D* pIDPosition,
    const XnPoint3D* pEndPosition,
    void* pCookie)
{
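  // The focus gesture was recognized: stop listening for it and track the hand from the gesture's end position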
  gesture_generator.RemoveGesture(strGesture);
  hands_generator.StartTracking(*pEndPosition);
}
Code example #6
File: main.cpp Project: cphoward/AquaKinect
int configKinect() {
	XnStatus rc = XN_STATUS_OK;
	xn::EnumerationErrors errors;

	// Initialize OpenNI
	rc = g_Context.InitFromXmlFile(SAMPLE_XML_PATH, g_ScriptNode, &errors);
	CHECK_ERRORS(rc, errors, "InitFromXmlFile");
	CHECK_RC(rc, "InitFromXmlFile");

	rc = g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_DepthGenerator);
	CHECK_RC(rc, "Find depth generator");
	rc = g_Context.FindExistingNode(XN_NODE_TYPE_GESTURE, g_GestureGenerator);
	CHECK_RC(rc, "Find gesture generator");

	XnCallbackHandle hGestureIntermediateStageCompleted, hGestureProgress, hGestureReadyForNextIntermediateStage;
	g_GestureGenerator.RegisterToGestureIntermediateStageCompleted(GestureIntermediateStageCompletedHandler, NULL, hGestureIntermediateStageCompleted);
	g_GestureGenerator.RegisterToGestureReadyForNextIntermediateStage(GestureReadyForNextIntermediateStageHandler, NULL, hGestureReadyForNextIntermediateStage);
	g_GestureGenerator.RegisterGestureCallbacks(NULL, GestureProgressHandler, NULL, hGestureProgress);
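	// Only the progress callback is registered here; recognition and hand tracking are driven by the NITE session manager set up below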

	// Create NITE objects
	g_pSessionManager = new XnVSessionManager;
	rc = g_pSessionManager->Initialize(&g_Context, "Click,Wave", "RaiseHand");
	CHECK_RC(rc, "SessionManager::Initialize");

	g_pSessionManager->RegisterSession(NULL, SessionStarting, SessionEnding, FocusProgress);

	pointHandler = new PointHandler(20, g_DepthGenerator); 
	g_pFlowRouter = new XnVFlowRouter;
	g_pFlowRouter->SetActive(pointHandler);

	g_pSessionManager->AddListener(g_pFlowRouter);

	pointHandler->RegisterNoPoints(NULL, NoHands);

	// Initialization done. Start generating
	rc = g_Context.StartGeneratingAll();
	CHECK_RC(rc, "StartGenerating");

	return rc;
}
Code example #7
File: OSCeleton.cpp Project: snowyu/OSCeleton
void XN_CALLBACK_TYPE lost_hand(xn::HandsGenerator &generator, XnUserID nId, XnFloat fTime, void *pCookie) {
	printf("Lost Hand %d               \n", nId);
    gestureGenerator.AddGesture(GESTURE_TO_USE, NULL);
    
	if (kitchenMode) return;

	osc::OutboundPacketStream p( osc_buffer, OUTPUT_BUFFER_SIZE );
	p << osc::BeginBundleImmediate;
	p << osc::BeginMessage("/lost_user");
	p << (int)nId;
	p << osc::EndMessage;
	p << osc::EndBundle;
	transmitSocket->Send(p.Data(), p.Size());
}
Code example #8
File: kinect.cpp Project: wenzong/lab
void XN_CALLBACK_TYPE Kinect::GestureRecognized( xn::GestureGenerator& generator,const XnChar* strGesture,
	const XnPoint3D* pIDPosition,const XnPoint3D* pEndPosition,void* pCookie )
{
	Kinect * kinect = ((Kinect *)pCookie);
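	// "Wave" simulates a left mouse click; "RaiseHand" hands the point off to the hands generator for tracking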
	if( strcmp( strGesture, "Wave" ) == 0 )
	{
		mouse_event(MOUSEEVENTF_LEFTDOWN,0,0,0,0);
		mouse_event(MOUSEEVENTF_LEFTUP,0,0,0,0);
	}
	else if( strcmp( strGesture, "RaiseHand" ) == 0 )
	{
		generator.RemoveGesture(strGesture);
		kinect->m_HandsGenerator.StartTracking( *pEndPosition );
	}
}
Code example #9
void XN_CALLBACK_TYPE handDestroy(HandsGenerator &generator, XnUserID user, XnFloat fTime, void *pCookie){
	printf("hand destroy \n");
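	// Forget the lost hand, clear its stored point history, reset zoom/rotation state, and re-arm gesture detection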
	if (hand1ID == user) {
		hand1ID = -1;
		while (hand1.size() > 0) {
			hand1.pop();
		}
	} else if (hand2ID == user) {
		hand2ID = -1;
		while (hand2.size() > 0) {
			hand2.pop();
		}
	}
	oldZoom = 1;
	oldAngle = 0;
	g_GestureGenerator.AddGesture(GESTURE_TO_USE, NULL);
}
Code example #10
void XN_CALLBACK_TYPE lost_hand(xn::HandsGenerator &generator, XnUserID nId, XnFloat fTime, void *pCookie) {
	printf("{\"lost_hand\":{\"userid\":%d}, \"elapsed\":%.3f}\n", nId, clockAsFloat(last));
	gestureGenerator.AddGesture(GESTURE_TO_USE, NULL);
}
Code example #11
//gesture callbacks
void XN_CALLBACK_TYPE Gesture_Recognized(xn::GestureGenerator& generator, const XnChar* strGesture, const XnPoint3D* pIDPosition, const XnPoint3D* pEndPosition, void* pCookie) {
	printf("{\"gesture\":{\"type\":\"%s\"}, \"elapsed\":%.3f}\n", strGesture, clockAsFloat(last));
	gestureGenerator.RemoveGesture(strGesture);
	handsGenerator.StartTracking(*pEndPosition);
}
Code example #12
File: main.cpp Project: jakubsieradzki/BeMyGest
int main(int argc, char* argv[]) {
  glue.Init(argc, argv, 640, 240, "TrackHand");

  xn::Context context;
  XnStatus status = context.Init();
  bmg::OnError(status, []{
    std::cout << "Couldn't init OpenNi!" << std::endl;
    exit(1);
  });

  xn::ImageGenerator image_generator;
  status = image_generator.Create(context);
  bmg::OnError(status, []{
    std::cout << "Couldn't create image generator!" << std::endl;
  });

  status = depth_generator.Create(context);
  bmg::OnError(status, []{
    std::cout << "Couldn't create depth generator!" << std::endl;
  });

  xn::ImageMetaData image_metadata;
  xn::DepthMetaData depth_metadata;

  // Create gesture & hands generators
  status = gesture_generator.Create(context);
  bmg::OnError(status, []{
    std::cout << "Couldn't create gesture generator!" << std::endl;
  });
  status = hands_generator.Create(context);
  bmg::OnError(status, []{
    std::cout << "Couldn't create hands generator!" << std::endl;
  });

  // Register to callbacks
  XnCallbackHandle h1, h2;
  gesture_generator
    .RegisterGestureCallbacks(Gesture_Recognized, Gesture_Process, NULL, h1);
  hands_generator
    .RegisterHandCallbacks(Hand_Create, Hand_Update, Hand_Destroy, NULL, h2);

  status = context.StartGeneratingAll();
  bmg::OnError(status, []{
    std::cout << "Couldn't generate all data!" << std::endl;
  });
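  // Adding the gesture starts detection; Gesture_Recognized fires when the user performs it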
  status = gesture_generator.AddGesture(GESTURE, NULL);
  bmg::OnError(status, []{
    std::cout << "Couldn't add gesture!" << std::endl;
  });

  glue.BindDisplayFunc([&]{
    glue.BeginDraw();

    // here goes code for app main loop
    XnStatus status = context.WaitAndUpdateAll();
    bmg::OnError(status, []{
      std::cout << "Couldn't update and wait for new data!" << std::endl;
    });

    image_generator.GetMetaData(image_metadata);
    unsigned imageX = image_metadata.XRes();
    unsigned imageY = image_metadata.YRes();

    glue.DrawOnTexture(
      (void*)image_metadata.RGB24Data(),
      imageX, imageY,
      imageX, imageY,
      320, 0, 640, 240);

    depth_generator.GetMetaData(depth_metadata);
    unsigned depthX = depth_metadata.XRes();
    unsigned depthY = depth_metadata.YRes();

    XnRGB24Pixel* transformed_depth_map = new XnRGB24Pixel[depthX * depthY];
    bmg::CalculateDepth(
      depth_generator.GetDepthMap(), depthX, depthY, MAX_DEPTH, transformed_depth_map);

    glue.DrawOnTexture(
      (void*)transformed_depth_map,
      depthX, depthY,
      depthX, depthY,
      0, 0,
      320, 240);
    delete [] transformed_depth_map;

    if (hand_recognized) {
      // Draw point over tracked hand
      glue.DrawPointOverRegion(static_cast<unsigned>(projective_point.X), static_cast<unsigned>(projective_point.Y), 0, 0);
      glue.DrawPointOverRegion(static_cast<unsigned>(projective_point.X), static_cast<unsigned>(projective_point.Y), 320, 0);
    }

    glue.EndDraw();
  });

  glue.BindKeyboardFunc([](unsigned char key, int x, int y){
    switch(key) {
    case 27:
      exit(1);
    }
  });

  glue.Run();
  context.Release();
}
Code example #13
int main(int argc, char* argv[])
{
	// Start 
    VideoCapture capture (CV_CAP_OPENNI);
    if(!capture.isOpened())
    {
        int error = -1;
        return 1;
    }

    namedWindow( "Color Image", 1 );
	//namedWindow( "Depth Map", 1);
    Mat view;
    bool blink = false;


	// NITE + openni
	
	XnStatus rc = XN_STATUS_OK;
	
	Context context;
	rc = context.Init();
	rc = g_GestureGenerator.Create(context);
	rc = g_HandsGenerator.Create(context);
	XnCallbackHandle hcb1,hcb2; 
	g_GestureGenerator.RegisterGestureCallbacks(Gesture_Recognized, Gesture_Process, NULL, hcb1);
	g_HandsGenerator.RegisterHandCallbacks(handCreate, handUpdate, handDestroy, NULL, hcb2);
	rc = context.StartGeneratingAll();
	rc = g_GestureGenerator.AddGesture(GESTURE_TO_USE, NULL);

	double d = 1.0;
	double angle = 0.0;
	double angleZ = 0.0;

	Mat result;
	
	Mat orig = imread("crocus.jpg");
	result.create(750, 750, CV_8UC3);
	double centerX = orig.cols/2;
    double centerY = orig.rows/2;
	warpPerspective(orig, orig, getScaleMatrix(1.0), orig.size(), INTER_CUBIC, BORDER_TRANSPARENT);

    while( capture.isOpened() )
    {
		rc = context.WaitAndUpdateAll();
		d = getZoom();
		angle = getAngle();
		angleZ = getAngle3D();
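		// Re-render only when the zoom has changed noticeably, accumulating the rotation into oldAngle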
		if (fabs(d - oldZoom) > 0.009) {
			//printf("angle = %f \n",angle);
			oldAngle += angle;

			//create the transformation to be passed to warp
			Mat openCVTransform = getRotationMatrix2D(Point2f(centerX, centerY), oldAngle, d);

			//warp image to apply transformation
			result.setTo(Scalar(0));
			warpAffine(orig, result, openCVTransform, result.size(), INTER_CUBIC, BORDER_TRANSPARENT);
			oldZoom = d;
			
		}
		imshow("Result", result);
		

        Mat bgrImage;
        capture.grab();

		capture.retrieve( bgrImage, CV_CAP_OPENNI_BGR_IMAGE );

		if (hand1ID != -1) {
			circle(bgrImage,Point(hand1.back().X + bgrImage.rows/2, bgrImage.cols/2 - hand1.back().Y),2,CV_RGB(0,255,0),3);
		
		}
		if (hand2ID != -1) {
			circle(bgrImage,Point(hand2.back().X + bgrImage.rows/2, bgrImage.cols/2 - hand2.back().Y),2,CV_RGB(0,255,0),3);
		}
		flip(bgrImage,bgrImage,1);
        imshow("Color Image", bgrImage);
		result.create(750, 750, CV_8UC3);
		
        if(waitKey(33) == 'q')
        {
            break;
        }
    }
	context.Shutdown();
    return 0;
}
Code example #14
void GestureGenerator_UnregisterGestureCallbacks_wrapped(xn::GestureGenerator& self, XnCallbackHandle* handle) {
    checkValid(self);

    self.UnregisterGestureCallbacks(*handle);
}
Code example #15
XnBool GestureGenerator_IsGestureAvailable_wrapped(xn::GestureGenerator& self, std::string gesture) {
    checkValid(self);
    
    return self.IsGestureAvailable(gesture.c_str());
}
Code example #16
XnBool GestureGenerator_IsGestureProgressSupported_wrapped(xn::GestureGenerator& self, std::string gesture) {
    checkValid(self);
    
    return self.IsGestureProgressSupported(gesture.c_str());
}
Code example #17
void GestureGenerator_RemoveGesture_wrapped(xn::GestureGenerator& self, std::string gesture) {
    checkValid(self);
    
    check( self.RemoveGesture(gesture.c_str()) );
}
Code example #18
void GestureGenerator_AddGesture_wrapped(xn::GestureGenerator& self, std::string gesture) {
    checkValid(self);
    
    check( self.AddGesture(gesture.c_str(), NULL) ); //FIXME: add default params here
}
Code example #19
void GestureGenerator_Create_wrapped(xn::GestureGenerator& self, xn::Context& context) {
    check( self.Create(context, NULL, NULL) );
}
Code example #20
int main(int argc, char **argv) {
    ros::init(argc, argv, "openni_hand_tracker");
    ros::NodeHandle nh;

    string configFilename = ros::package::getPath("openni_tracker") + "/openni_tracker.xml";
    XnStatus nRetVal = g_Context.InitFromXmlFile(configFilename.c_str());
    CHECK_RC(nRetVal, "InitFromXml");

    nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_DepthGenerator);
    CHECK_RC(nRetVal, "Find depth generator");
	
    // Create generators
    nRetVal = g_GestureGenerator.Create(g_Context);
    CHECK_RC(nRetVal, "Unable to create GestureGenerator.");

    nRetVal = g_HandsGenerator.Create(g_Context);
    CHECK_RC(nRetVal, "Unable to create HandsGenerator.");
    
    ROS_INFO("Create Generator Success");

/*

	nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_USER, g_UserGenerator);
	if (nRetVal != XN_STATUS_OK) {
		nRetVal = g_UserGenerator.Create(g_Context);
		CHECK_RC(nRetVal, "Find user generator");
	}

	if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_SKELETON)) {
		ROS_INFO("Supplied user generator doesn't support skeleton");
		return 1;
	}

    XnCallbackHandle hUserCallbacks;
	g_UserGenerator.RegisterUserCallbacks(User_NewUser, User_LostUser, NULL, hUserCallbacks);

	XnCallbackHandle hCalibrationCallbacks;
	g_UserGenerator.GetSkeletonCap().RegisterCalibrationCallbacks(UserCalibration_CalibrationStart, UserCalibration_CalibrationEnd, NULL, hCalibrationCallbacks);

	if (g_UserGenerator.GetSkeletonCap().NeedPoseForCalibration()) {
		g_bNeedPose = TRUE;
		if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION)) {
			ROS_INFO("Pose required, but not supported");
			return 1;
		}

		XnCallbackHandle hPoseCallbacks;
		g_UserGenerator.GetPoseDetectionCap().RegisterToPoseCallbacks(UserPose_PoseDetected, NULL, NULL, hPoseCallbacks);

		g_UserGenerator.GetSkeletonCap().GetCalibrationPose(g_strPose);
	}

	g_UserGenerator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);
*/

	nRetVal = g_Context.StartGeneratingAll();
	CHECK_RC(nRetVal, "StartGenerating");

	ros::Rate r(30);

	ros::NodeHandle pnh("~");
	string frame_id("openni_depth_frame");
	pnh.getParam("camera_frame_id", frame_id);

	while (ros::ok()) {
		g_Context.WaitAndUpdateAll();
		r.sleep();
	}

	g_Context.Shutdown();
	return 0;
}
Code example #21
File: main.cpp Project: avinashb-sd/MaestroRepo
int main(int argc, char ** argv)
{
	XnStatus rc = XN_STATUS_OK;
	xn::EnumerationErrors errors;

	// Initialize OpenNI
	rc = g_Context.InitFromXmlFile(SAMPLE_XML_PATH, g_ScriptNode, &errors);
	CHECK_ERRORS(rc, errors, "InitFromXmlFile");
	CHECK_RC(rc, "InitFromXmlFile");

	rc = g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_DepthGenerator);
	CHECK_RC(rc, "Find depth generator");
	rc = g_Context.FindExistingNode(XN_NODE_TYPE_HANDS, g_HandsGenerator);
	CHECK_RC(rc, "Find hands generator");
	rc = g_Context.FindExistingNode(XN_NODE_TYPE_GESTURE, g_GestureGenerator);
	CHECK_RC(rc, "Find gesture generator");

	XnCallbackHandle h;
	if (g_HandsGenerator.IsCapabilitySupported(XN_CAPABILITY_HAND_TOUCHING_FOV_EDGE))
	{
		g_HandsGenerator.GetHandTouchingFOVEdgeCap().RegisterToHandTouchingFOVEdge(TouchingCallback, NULL, h);
	}

	XnCallbackHandle hGestureIntermediateStageCompleted, hGestureProgress, hGestureReadyForNextIntermediateStage;
	g_GestureGenerator.RegisterToGestureIntermediateStageCompleted(GestureIntermediateStageCompletedHandler, NULL, hGestureIntermediateStageCompleted);
	g_GestureGenerator.RegisterToGestureReadyForNextIntermediateStage(GestureReadyForNextIntermediateStageHandler, NULL, hGestureReadyForNextIntermediateStage);
	g_GestureGenerator.RegisterGestureCallbacks(NULL, GestureProgressHandler, NULL, hGestureProgress);


	// Create NITE objects
	g_pSessionManager = new XnVSessionManager;
	rc = g_pSessionManager->Initialize(&g_Context, "Click,Wave", "RaiseHand");
	CHECK_RC(rc, "SessionManager::Initialize");

	g_pSessionManager->RegisterSession(NULL, SessionStarting, SessionEnding, FocusProgress);

	g_pDrawer = new XnVPointDrawer(20, g_DepthGenerator); 
	g_pFlowRouter = new XnVFlowRouter;
	g_pFlowRouter->SetActive(g_pDrawer);

	g_pSessionManager->AddListener(g_pFlowRouter);

	g_pDrawer->RegisterNoPoints(NULL, NoHands);
	g_pDrawer->SetDepthMap(g_bDrawDepthMap);

	// Initialization done. Start generating
	rc = g_Context.StartGeneratingAll();
	CHECK_RC(rc, "StartGenerating");

	// Mainloop
#ifdef USE_GLUT

	glInit(&argc, argv);
	glutMainLoop();

#elif defined(USE_GLES)
	if (!opengles_init(GL_WIN_SIZE_X, GL_WIN_SIZE_Y, &display, &surface, &context))
	{
		printf("Error initializing opengles\n");
		CleanupExit();
	}
	glDisable(GL_DEPTH_TEST);
	glEnable(GL_TEXTURE_2D);
	glEnableClientState(GL_VERTEX_ARRAY);
	glDisableClientState(GL_COLOR_ARRAY);

	while ((!_kbhit()) && (!g_bQuit))
	{
		glutDisplay();
		eglSwapBuffers(display, surface);
	}
	opengles_shutdown(display, surface, context);

	CleanupExit();
#endif
}