int main(int argc, char **argv) {
    ros::init(argc, argv, "openni_hand_tracker");
    ros::NodeHandle nh;

    // Load the OpenNI configuration shipped with the package.
    string configFilename = ros::package::getPath("openni_tracker") + "/openni_tracker.xml";
    XnStatus nRetVal = g_Context.InitFromXmlFile(configFilename.c_str());
    CHECK_RC(nRetVal, "InitFromXml");

    nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_DepthGenerator);
    CHECK_RC(nRetVal, "Find depth generator");

    // Create gesture and hands generators.
    nRetVal = g_GestureGenerator.Create(g_Context);
    CHECK_RC(nRetVal, "Unable to create GestureGenerator.");

    nRetVal = g_HandsGenerator.Create(g_Context);
    CHECK_RC(nRetVal, "Unable to create HandsGenerator.");

    ROS_INFO("Create Generator Success");

    /*
    nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_USER, g_UserGenerator);
    if (nRetVal != XN_STATUS_OK) {
        nRetVal = g_UserGenerator.Create(g_Context);
        CHECK_RC(nRetVal, "Find user generator");
    }

    if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_SKELETON)) {
        ROS_INFO("Supplied user generator doesn't support skeleton");
        return 1;
    }

    XnCallbackHandle hUserCallbacks;
    g_UserGenerator.RegisterUserCallbacks(User_NewUser, User_LostUser, NULL, hUserCallbacks);

    XnCallbackHandle hCalibrationCallbacks;
    g_UserGenerator.GetSkeletonCap().RegisterCalibrationCallbacks(
        UserCalibration_CalibrationStart, UserCalibration_CalibrationEnd,
        NULL, hCalibrationCallbacks);

    if (g_UserGenerator.GetSkeletonCap().NeedPoseForCalibration()) {
        g_bNeedPose = TRUE;
        if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION)) {
            ROS_INFO("Pose required, but not supported");
            return 1;
        }

        XnCallbackHandle hPoseCallbacks;
        g_UserGenerator.GetPoseDetectionCap().RegisterToPoseCallbacks(
            UserPose_PoseDetected, NULL, NULL, hPoseCallbacks);
        g_UserGenerator.GetSkeletonCap().GetCalibrationPose(g_strPose);
    }

    g_UserGenerator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);
    */

    nRetVal = g_Context.StartGeneratingAll();
    CHECK_RC(nRetVal, "StartGenerating");

    ros::Rate r(30);

    ros::NodeHandle pnh("~");
    string frame_id("openni_depth_frame");
    pnh.getParam("camera_frame_id", frame_id);

    while (ros::ok()) {
        g_Context.WaitAndUpdateAll();
        r.sleep();
    }

    g_Context.Shutdown();
    return 0;
}
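As written, this main only creates the gesture and hands generators and never consumes their output. Below is a minimal sketch of the wiring such a tracker typically needs; the callback names, the "Wave" gesture, and the idea of publishing relative to camera_frame_id are assumptions, not part of the original file.

// Hypothetical callbacks, not present in the snippet above.
void XN_CALLBACK_TYPE Gesture_Recognized(xn::GestureGenerator& generator,
        const XnChar* strGesture, const XnPoint3D* pIDPosition,
        const XnPoint3D* pEndPosition, void* pCookie) {
    // The focus gesture was seen; hand its end position to the hand tracker.
    g_HandsGenerator.StartTracking(*pEndPosition);
}

void XN_CALLBACK_TYPE Hand_Update(xn::HandsGenerator& generator, XnUserID nId,
        const XnPoint3D* pPosition, XnFloat fTime, void* pCookie) {
    // Positions arrive in millimeters in the camera frame; a real node would
    // publish them here, e.g. as a tf transform in frame_id.
    ROS_INFO("Hand %u at (%.0f, %.0f, %.0f)", nId,
             pPosition->X, pPosition->Y, pPosition->Z);
}

// In main(), before g_Context.StartGeneratingAll():
//     XnCallbackHandle hGesture, hHands;
//     g_GestureGenerator.RegisterGestureCallbacks(Gesture_Recognized, NULL, NULL, hGesture);
//     g_HandsGenerator.RegisterHandCallbacks(NULL, Hand_Update, NULL, NULL, hHands);
//     g_GestureGenerator.AddGesture("Wave", NULL);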
// Wrapper exposing xn::GestureGenerator::Create with error checking
// (no query, no enumeration-errors object).
void GestureGenerator_Create_wrapped(xn::GestureGenerator& self, xn::Context& context) {
    check( self.Create(context, NULL, NULL) );
}
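The check helper this wrapper calls is not shown. A minimal sketch, assuming the binding layer wants a C++ exception on any failing XnStatus:

#include <stdexcept>

// Hypothetical helper: turn any non-OK OpenNI status into an exception.
inline void check(XnStatus status) {
    if (status != XN_STATUS_OK) {
        throw std::runtime_error(xnGetStatusString(status));
    }
}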
int main(int argc, char* argv[]) {
    glue.Init(argc, argv, 640, 240, "TrackHand");

    xn::Context context;
    XnStatus status = context.Init();
    bmg::OnError(status, []{
        std::cout << "Couldn't init OpenNi!" << std::endl;
        exit(1);
    });

    xn::ImageGenerator image_generator;
    status = image_generator.Create(context);
    bmg::OnError(status, []{
        std::cout << "Couldn't create image generator!" << std::endl;
    });

    status = depth_generator.Create(context);
    bmg::OnError(status, []{
        std::cout << "Couldn't create depth generator!" << std::endl;
    });

    xn::ImageMetaData image_metadata;
    xn::DepthMetaData depth_metadata;

    // Create gesture & hands generators
    status = gesture_generator.Create(context);
    bmg::OnError(status, []{
        std::cout << "Couldn't create gesture generator!" << std::endl;
    });

    status = hands_generator.Create(context);
    bmg::OnError(status, []{
        std::cout << "Couldn't create hands generator!" << std::endl;
    });

    // Register to callbacks
    XnCallbackHandle h1, h2;
    gesture_generator.RegisterGestureCallbacks(Gesture_Recognized, Gesture_Process, NULL, h1);
    hands_generator.RegisterHandCallbacks(Hand_Create, Hand_Update, Hand_Destroy, NULL, h2);

    status = context.StartGeneratingAll();
    bmg::OnError(status, []{
        std::cout << "Couldn't generate all data!" << std::endl;
    });

    // Start looking for the focus gesture.
    status = gesture_generator.AddGesture(GESTURE, NULL);
    bmg::OnError(status, []{
        std::cout << "Couldn't add gesture!" << std::endl;
    });

    glue.BindDisplayFunc([&]{
        glue.BeginDraw();

        // App main loop: block until new frames arrive, then redraw.
        XnStatus status = context.WaitAndUpdateAll();
        bmg::OnError(status, []{
            std::cout << "Couldn't update and wait for new data!" << std::endl;
        });

        // Draw the RGB image on the right half of the window.
        image_generator.GetMetaData(image_metadata);
        unsigned imageX = image_metadata.XRes();
        unsigned imageY = image_metadata.YRes();
        glue.DrawOnTexture((void*)image_metadata.RGB24Data(),
                           imageX, imageY, imageX, imageY, 320, 0, 640, 240);

        // Colorize the depth map and draw it on the left half.
        depth_generator.GetMetaData(depth_metadata);
        unsigned depthX = depth_metadata.XRes();
        unsigned depthY = depth_metadata.YRes();
        XnRGB24Pixel* transformed_depth_map = new XnRGB24Pixel[depthX * depthY];
        bmg::CalculateDepth(depth_generator.GetDepthMap(), depthX, depthY,
                            MAX_DEPTH, transformed_depth_map);
        glue.DrawOnTexture((void*)transformed_depth_map,
                           depthX, depthY, depthX, depthY, 0, 0, 320, 240);
        delete [] transformed_depth_map;

        if (hand_recognized) {
            // Draw point over tracked hand in both views.
            glue.DrawPointOverRegion(static_cast<unsigned>(projective_point.X),
                                     static_cast<unsigned>(projective_point.Y), 0, 0);
            glue.DrawPointOverRegion(static_cast<unsigned>(projective_point.X),
                                     static_cast<unsigned>(projective_point.Y), 320, 0);
        }

        glue.EndDraw();
    });

    glue.BindKeyboardFunc([](unsigned char key, int x, int y){
        switch (key) {
        case 27:  // ESC
            exit(1);
        }
    });

    glue.Run();
    context.Release();
    return 0;
}
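The display loop above reads hand_recognized and projective_point, but their producers are not shown. A sketch of the missing callbacks, assuming they simply mirror tracker state into those globals (the real-world-to-projective conversion is what lets the point land on the 320x240 regions drawn above):

// Assumed definitions; the originals are not part of the snippet.
bool hand_recognized = false;
XnPoint3D projective_point;

void XN_CALLBACK_TYPE Gesture_Recognized(xn::GestureGenerator& generator,
        const XnChar* strGesture, const XnPoint3D* pIDPosition,
        const XnPoint3D* pEndPosition, void* pCookie) {
    // Focus gesture seen: start tracking the hand that performed it.
    hands_generator.StartTracking(*pEndPosition);
}

void XN_CALLBACK_TYPE Hand_Create(xn::HandsGenerator& generator, XnUserID nId,
        const XnPoint3D* pPosition, XnFloat fTime, void* pCookie) {
    hand_recognized = true;
}

void XN_CALLBACK_TYPE Hand_Update(xn::HandsGenerator& generator, XnUserID nId,
        const XnPoint3D* pPosition, XnFloat fTime, void* pCookie) {
    // Hand positions are real-world millimeters; project them into
    // depth-image pixels for DrawPointOverRegion.
    depth_generator.ConvertRealWorldToProjective(1, pPosition, &projective_point);
}

void XN_CALLBACK_TYPE Hand_Destroy(xn::HandsGenerator& generator, XnUserID nId,
        XnFloat fTime, void* pCookie) {
    hand_recognized = false;
}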
int main(int argc, char* argv[]) {
    // Start the OpenNI-backed OpenCV capture.
    VideoCapture capture(CV_CAP_OPENNI);
    if (!capture.isOpened()) {
        return 1;
    }

    namedWindow("Color Image", 1);
    //namedWindow("Depth Map", 1);

    Mat view;
    bool blink = false;

    // NITE + OpenNI
    XnStatus rc = XN_STATUS_OK;
    Context context;
    rc = context.Init();
    rc = g_GestureGenerator.Create(context);
    rc = g_HandsGenerator.Create(context);

    XnCallbackHandle hcb1, hcb2;
    g_GestureGenerator.RegisterGestureCallbacks(Gesture_Recognized, Gesture_Process, NULL, hcb1);
    g_HandsGenerator.RegisterHandCallbacks(handCreate, handUpdate, handDestroy, NULL, hcb2);

    rc = context.StartGeneratingAll();
    rc = g_GestureGenerator.AddGesture(GESTURE_TO_USE, NULL);

    double d = 1.0;
    double angle = 0.0;
    double angleZ = 0.0;

    Mat result;
    Mat orig = imread("crocus.jpg");
    result.create(750, 750, CV_8UC3);
    double centerX = orig.cols / 2;
    double centerY = orig.rows / 2;
    warpPerspective(orig, orig, getScaleMatrix(1.0), orig.size(), INTER_CUBIC, BORDER_TRANSPARENT);

    while (capture.isOpened()) {
        rc = context.WaitAndUpdateAll();

        d = getZoom();
        angle = getAngle();
        angleZ = getAngle3D();

        // Only re-render when the zoom changed noticeably.
        // fabs, not abs: the zoom values are doubles, and integer abs()
        // would truncate the difference to zero.
        if (fabs(d - oldZoom) > 0.009) {
            //printf("angle = %f \n",angle);
            oldAngle += angle;
            // Create the transformation to be passed to warpAffine.
            Mat openCVTransform = getRotationMatrix2D(Point2f(centerX, centerY), oldAngle, d);
            // Warp image to apply the rotation/scale.
            result.setTo(Scalar(0));
            warpAffine(orig, result, openCVTransform, result.size(), INTER_CUBIC, BORDER_TRANSPARENT);
            oldZoom = d;
        }
        imshow("Result", result);

        Mat bgrImage;
        capture.grab();
        capture.retrieve(bgrImage, CV_CAP_OPENNI_BGR_IMAGE);

        // Mark the tracked hands. The offsets re-center the hand coordinates
        // on the image: cols is the width (x), rows the height (y).
        if (hand1ID != -1) {
            circle(bgrImage,
                   Point(hand1.back().X + bgrImage.cols/2, bgrImage.rows/2 - hand1.back().Y),
                   2, CV_RGB(0,255,0), 3);
        }
        if (hand2ID != -1) {
            circle(bgrImage,
                   Point(hand2.back().X + bgrImage.cols/2, bgrImage.rows/2 - hand2.back().Y),
                   2, CV_RGB(0,255,0), 3);
        }

        flip(bgrImage, bgrImage, 1);
        imshow("Color Image", bgrImage);
        result.create(750, 750, CV_8UC3);

        if (waitKey(33) == 'q') {
            break;
        }
    }

    context.Shutdown();
    return 0;
}
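getZoom(), getAngle(), and getAngle3D() are not shown. One plausible reading, given that the loop treats getZoom() as an absolute scale and getAngle() as a per-frame increment, is to derive both from the line joining the two tracked hands. This is a sketch only; initialHandDist is a hypothetical global that would be captured when the second hand starts being tracked.

#include <cmath>

double initialHandDist = 1.0;  // hypothetical; set once both hands are tracked

double getZoom() {
    if (hand1ID == -1 || hand2ID == -1 || hand1.empty() || hand2.empty())
        return oldZoom;  // no change while a hand is missing
    const XnPoint3D& a = hand1.back();
    const XnPoint3D& b = hand2.back();
    // Scale = current hand separation relative to the initial separation.
    double dist = std::sqrt((a.X - b.X) * (a.X - b.X) +
                            (a.Y - b.Y) * (a.Y - b.Y));
    return dist / initialHandDist;
}

double getAngle() {
    // Incremental in-plane rotation since the previous frame, matching the
    // loop's "oldAngle += angle" accumulation.
    static double prevAngle = 0.0;
    if (hand1ID == -1 || hand2ID == -1 || hand1.empty() || hand2.empty())
        return 0.0;
    const XnPoint3D& a = hand1.back();
    const XnPoint3D& b = hand2.back();
    double cur = std::atan2(b.Y - a.Y, b.X - a.X) * 180.0 / M_PI;
    double delta = cur - prevAngle;
    prevAngle = cur;
    return delta;
}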