Example #1
bool DataCapture::captureOne()
{
    XnStatus rc = context_.WaitAndUpdateAll(); // TODO: ideally WaitOneUpdateAll(imageGen_) to wait only on the RGB image (see sketch below)
    if( rc != XN_STATUS_OK )
    {
        std::cout << "WaitAndUpdateAll: " << xnGetStatusString(rc) << std::endl;
        return false;
    }

    // grab image
    imageGen_.GetMetaData(imageMd_);
    const XnRGB24Pixel* rgbData = imageMd_.RGB24Data();
    for( unsigned int i = 0; i < 640 * 480; ++i )
    {
        pRgbData_[3*i] = rgbData->nRed;
        pRgbData_[3*i + 1] = rgbData->nGreen;
        pRgbData_[3*i + 2] = rgbData->nBlue;
        ++rgbData;
    }

    // grab depth image
    depthGen_.GetMetaData(depthMd_);
    const uint16_t* pDepthDataU16 = depthMd_.Data();
    for( int i = 0; i < 640 * 480; ++i)
    {
        uint16_t d = pDepthDataU16[i];
        if( d != 0 )
        {
            pDepthData_[i] = (d * 255)/2048;
        }
        else
        {
            pDepthData_[i] = 0; // should be NAN
        }
    }
    return true;
}
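A minimal sketch of the change suggested by the TODO above, using xn::Context::WaitOneUpdateAll() to block only until one specific node has new data (context_ and imageGen_ as in this example):

    XnStatus rc = context_.WaitOneUpdateAll(imageGen_); // wait for the RGB generator only
    if( rc != XN_STATUS_OK )
    {
        std::cout << "WaitOneUpdateAll: " << xnGetStatusString(rc) << std::endl;
        return false;
    }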
Example #2
void draw() {
	glClear( GL_COLOR_BUFFER_BIT );

	XnMapOutputMode mode;
	g_DepthGenerator.GetMapOutputMode(mode);

	Matrix4f projectionMatrix = Matrix4f();
	projectionMatrix.Ortho(0, mode.nXRes, mode.nYRes, 0, -1.0, 1.0);

	Vector primaryPoint;
	primaryPoint = pointHandler->getPrimaryPoint();
	Vector movement = primaryPoint - position;
	movement.normalize();
	position += movement;
	position.print();

	Matrix4f modelViewMatrix;
	modelViewMatrix.translate(position.getX(), position.getY(), 0);

	Matrix4f modelViewProjectionMatrix;
	modelViewProjectionMatrix = projectionMatrix * modelViewMatrix;
	//glUniformMatrix4fv(matLoc, 1, GL_FALSE, modelViewProjectionMatrix); 
	// Use our shader
	glUseProgram(programObject);

	modelViewProjectionMatrix.uniformMatrix(matLoc);

	// Draw the triangle
	glDrawArrays(GL_LINE_LOOP, 0, 3);

	g_Context.WaitOneUpdateAll(g_DepthGenerator);
	// Update NITE tree
	g_pSessionManager->Update(&g_Context);
	PrintSessionState(g_SessionState);

	SDL_GL_SwapBuffers();
}
Example #3
void DrawCircle(xn::UserGenerator& userGenerator,
              xn::DepthGenerator& depthGenerator,
              XnUserID player, XnSkeletonJoint eJoint, float radius, XnFloat *color3f) 
{ 

	XnSkeletonJointPosition joint;
	userGenerator.GetSkeletonCap().GetSkeletonJointPosition(player, eJoint, joint);

	if (joint.fConfidence < 0.5)
	{
		return;
	}

	XnPoint3D pt = joint.position;
	depthGenerator.ConvertRealWorldToProjective(1, &pt, &pt);
	float cx = pt.X;
	float cy = pt.Y;
	float r = radius;
	int num_segments = 16;

	glColor3f(color3f[0], color3f[1], color3f[2]);

	glBegin(GL_TRIANGLE_FAN); 
	glVertex2f(cx , cy);
	for(int i = 0; i <= num_segments; i++) 
	{ 
		float theta = 2.0f * 3.1415926f * float(i) / float(num_segments);//get the current angle 

		float x = r * cosf(theta);//calculate the x component 
		float y = r * sinf(theta);//calculate the y component 

		glVertex2f(x +cx , y + cy);//output vertex 

	} 
	glEnd(); 
}
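A possible call site for DrawCircle above, marking both hands of a tracked user (the color values, the 10-pixel radius, and the player variable are illustrative assumptions; g_UserGenerator and g_DepthGenerator are the globals used by the other examples):

	XnFloat red[3]   = { 1.0f, 0.0f, 0.0f };
	XnFloat green[3] = { 0.0f, 1.0f, 0.0f };
	DrawCircle(g_UserGenerator, g_DepthGenerator, player, XN_SKEL_LEFT_HAND,  10.0f, red);
	DrawCircle(g_UserGenerator, g_DepthGenerator, player, XN_SKEL_RIGHT_HAND, 10.0f, green);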
Example #4
XnPoint3D getJointPoint(XnUserID player, XnSkeletonJoint eJoint) {
    XnPoint3D pt = {0, 0, 0}; // zero-initialize so the early returns below hand back a defined point
    if (!g_UserGenerator.GetSkeletonCap().IsCalibrated(player))
    {
        printf("not calibrated!\n");
        return pt;
    }
    if (!g_UserGenerator.GetSkeletonCap().IsTracking(player))
    {
        printf("not tracked!\n");
        return pt;
    }

    XnSkeletonJointPosition joint;
    g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(player, eJoint, joint);

    if (joint.fConfidence < 0.5)
    {
        return pt;
    }

    g_DepthGenerator.ConvertRealWorldToProjective(1, &joint.position, &pt); // exactly one point is converted
    return pt;
}
Example #5
void DrawDepthMap(const xn::DepthMetaData& dmd, const xn::SceneMetaData& smd)
{
	static bool bInitialized = false;
	static GLuint depthTexID;
	static unsigned char* pDepthTexBuf;
	static int texWidth, texHeight;

	float topLeftX;
	float topLeftY;
	float bottomRightY;
	float bottomRightX;
	float texXpos;
	float texYpos;

	if(!bInitialized)
	{
		texWidth =  getClosestPowerOfTwo(dmd.XRes());
		texHeight = getClosestPowerOfTwo(dmd.YRes());

//		printf("Initializing depth texture: width = %d, height = %d\n", texWidth, texHeight);
		depthTexID = initTexture((void**)&pDepthTexBuf,texWidth, texHeight) ;

//		printf("Initialized depth texture: width = %d, height = %d\n", texWidth, texHeight);
		bInitialized = true;

		topLeftX = dmd.XRes();
		topLeftY = 0;
		bottomRightY = dmd.YRes();
		bottomRightX = 0;
		texXpos =(float)dmd.XRes()/texWidth;
		texYpos  =(float)dmd.YRes()/texHeight;

		memset(texcoords, 0, 8*sizeof(float));
		texcoords[0] = texXpos, texcoords[1] = texYpos, texcoords[2] = texXpos, texcoords[7] = texYpos;
	}

	unsigned int nValue = 0;
	unsigned int nHistValue = 0;
	unsigned int nIndex = 0;
	unsigned int nX = 0;
	unsigned int nY = 0;
	unsigned int nNumberOfPoints = 0;
	XnUInt16 g_nXRes = dmd.XRes();
	XnUInt16 g_nYRes = dmd.YRes();

	unsigned char* pDestImage = pDepthTexBuf;

	const XnDepthPixel* pDepth = dmd.Data();
	const XnLabel* pLabels = smd.Data();

	// Calculate the accumulative histogram
	memset(g_pDepthHist, 0, MAX_DEPTH*sizeof(float));
	for (nY=0; nY<g_nYRes; nY++)
	{
		for (nX=0; nX<g_nXRes; nX++)
		{
			nValue = *pDepth;

			if (nValue != 0)
			{
				g_pDepthHist[nValue]++;
				nNumberOfPoints++;
			}

			pDepth++;
		}
	}

	for (nIndex=1; nIndex<MAX_DEPTH; nIndex++)
	{
		g_pDepthHist[nIndex] += g_pDepthHist[nIndex-1];
	}
	if (nNumberOfPoints)
	{
		for (nIndex=1; nIndex<MAX_DEPTH; nIndex++)
		{
			g_pDepthHist[nIndex] = (unsigned int)(256 * (1.0f - (g_pDepthHist[nIndex] / nNumberOfPoints)));
		}
	}

	pDepth = dmd.Data();
	if (g_bDrawPixels)
	{
		XnUInt32 nIndex = 0;
		// Prepare the texture map
		for (nY=0; nY<g_nYRes; nY++)
		{
			for (nX=0; nX < g_nXRes; nX++, nIndex++)
			{

				pDestImage[0] = 0;
				pDestImage[1] = 0;
				pDestImage[2] = 0;
				if (g_bDrawBackground || *pLabels != 0)
				{
					nValue = *pDepth;
					XnLabel label = *pLabels;
					XnUInt32 nColorID = label % nColors;
					if (label == 0)
					{
						nColorID = nColors;
					}

					if (nValue != 0)
					{
						nHistValue = g_pDepthHist[nValue];

						pDestImage[0] = nHistValue * Colors[nColorID][0];
						pDestImage[1] = nHistValue * Colors[nColorID][1];
						pDestImage[2] = nHistValue * Colors[nColorID][2];
					}
				}

				pDepth++;
				pLabels++;
				pDestImage+=3;
			}

			pDestImage += (texWidth - g_nXRes) *3;
		}
	}
	else
	{
		xnOSMemSet(pDepthTexBuf, 0, 3*2*g_nXRes*g_nYRes);
	}

	glBindTexture(GL_TEXTURE_2D, depthTexID);
	glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, texWidth, texHeight, 0, GL_RGB, GL_UNSIGNED_BYTE, pDepthTexBuf);

	// Display the OpenGL texture map
	glColor4f(0.75,0.75,0.75,1);

	glEnable(GL_TEXTURE_2D);
	DrawTexture(dmd.XRes(),dmd.YRes(),0,0);
	glDisable(GL_TEXTURE_2D);

	char strLabel[50] = "";
	XnUserID aUsers[15];
	XnUInt16 nUsers = 15;
	g_UserGenerator.GetUsers(aUsers, nUsers);
	for (int i = 0; i < nUsers; ++i)
	{

		if (g_bPrintID)
		{
			XnPoint3D com;
			g_UserGenerator.GetCoM(aUsers[i], com);
			g_DepthGenerator.ConvertRealWorldToProjective(1, &com, &com);

			xnOSMemSet(strLabel, 0, sizeof(strLabel));
			if (!g_bPrintState)
			{
				// Tracking
				sprintf(strLabel, "%d", aUsers[i]);
			}
			else if (g_UserGenerator.GetSkeletonCap().IsTracking(aUsers[i]))
			{
				// Tracking
				sprintf(strLabel, "%d - Tracking", aUsers[i]);
			}
			else if (g_UserGenerator.GetSkeletonCap().IsCalibrating(aUsers[i]))
			{
				// Calibrating
				sprintf(strLabel, "%d - Calibrating [%s]", aUsers[i], GetCalibrationErrorString(m_Errors[aUsers[i]].first));
			}
			else
			{
				// Nothing
				sprintf(strLabel, "%d - Looking for pose [%s]", aUsers[i], GetPoseErrorString(m_Errors[aUsers[i]].second));
			}


			glColor4f(1-Colors[i%nColors][0], 1-Colors[i%nColors][1], 1-Colors[i%nColors][2], 1);

			glRasterPos2i(com.X, com.Y);
			glPrintString(GLUT_BITMAP_HELVETICA_18, strLabel);
		}

		//Draw skeleton
		if (g_bDrawSkeleton && g_UserGenerator.GetSkeletonCap().IsTracking(aUsers[i]))
		{

			glBegin(GL_LINES);

			glColor4f(1-Colors[aUsers[i]%nColors][0], 1-Colors[aUsers[i]%nColors][1], 1-Colors[aUsers[i]%nColors][2], 1);
			DrawLimb(aUsers[i], XN_SKEL_HEAD, XN_SKEL_NECK);

			DrawLimb(aUsers[i], XN_SKEL_NECK, XN_SKEL_LEFT_SHOULDER);
			DrawLimb(aUsers[i], XN_SKEL_LEFT_SHOULDER, XN_SKEL_LEFT_ELBOW);
			DrawLimb(aUsers[i], XN_SKEL_LEFT_ELBOW, XN_SKEL_LEFT_HAND);

			DrawLimb(aUsers[i], XN_SKEL_NECK, XN_SKEL_RIGHT_SHOULDER);
			DrawLimb(aUsers[i], XN_SKEL_RIGHT_SHOULDER, XN_SKEL_RIGHT_ELBOW);
			DrawLimb(aUsers[i], XN_SKEL_RIGHT_ELBOW, XN_SKEL_RIGHT_HAND);

			DrawLimb(aUsers[i], XN_SKEL_LEFT_SHOULDER, XN_SKEL_TORSO);
			DrawLimb(aUsers[i], XN_SKEL_RIGHT_SHOULDER, XN_SKEL_TORSO);

			DrawLimb(aUsers[i], XN_SKEL_TORSO, XN_SKEL_LEFT_HIP);
			DrawLimb(aUsers[i], XN_SKEL_LEFT_HIP, XN_SKEL_LEFT_KNEE);
			DrawLimb(aUsers[i], XN_SKEL_LEFT_KNEE, XN_SKEL_LEFT_FOOT);

			DrawLimb(aUsers[i], XN_SKEL_TORSO, XN_SKEL_RIGHT_HIP);
			DrawLimb(aUsers[i], XN_SKEL_RIGHT_HIP, XN_SKEL_RIGHT_KNEE);
			DrawLimb(aUsers[i], XN_SKEL_RIGHT_KNEE, XN_SKEL_RIGHT_FOOT);

			DrawLimb(aUsers[i], XN_SKEL_LEFT_HIP, XN_SKEL_RIGHT_HIP);

			glEnd();

		}
	}
}
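The core of DrawDepthMap above is the accumulative depth histogram that turns raw depth into display intensity (the fewer pixels lie nearer than a given depth, the brighter that depth is drawn). A standalone sketch of just that step, with an assumed maximum depth in place of the sample's MAX_DEPTH:

	#include <cstdint>
	#include <vector>

	// Sketch: map raw 16-bit depth values to 8-bit intensities using the same
	// accumulative-histogram scheme as DrawDepthMap above.
	std::vector<uint8_t> depthToIntensity(const uint16_t* depth, size_t count, size_t maxDepth = 10000)
	{
		std::vector<float> hist(maxDepth, 0.0f);
		size_t points = 0;
		for (size_t i = 0; i < count; ++i)            // histogram of non-zero depths
			if (depth[i] != 0 && depth[i] < maxDepth) { hist[depth[i]]++; ++points; }
		for (size_t d = 1; d < maxDepth; ++d)         // accumulate
			hist[d] += hist[d - 1];
		std::vector<uint8_t> out(count, 0);
		if (points == 0) return out;
		for (size_t i = 0; i < count; ++i)
			if (depth[i] != 0 && depth[i] < maxDepth)
				out[i] = (uint8_t)(256 * (1.0f - hist[depth[i]] / points));
		return out;
	}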
Example #6
cv::Mat xncv::captureDepth(const xn::DepthGenerator& generator)
{
	xn::DepthMetaData meta;
	generator.GetMetaData(meta);
	return cv::Mat(meta.YRes(), meta.XRes(), cv::DataType<ushort>::type, (void*)meta.Data());
}
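Note that the returned cv::Mat wraps OpenNI's internal depth buffer without copying, so it is only valid until the generator's next update. A hypothetical usage that clones the data when it must outlive the current frame (generator stands for any initialized xn::DepthGenerator):

	cv::Mat depthCopy = xncv::captureDepth(generator).clone(); // deep copy, safe to keep across updates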
Example #7
int main ( int argc, char ** argv )
{
	// 
	// Initializing Calibration Related
	// 

	// ARTagHelper artagHelper	( colorImgWidth, colorImgHeight, ARTAG_CONFIG_FILE,		ARTAG_POS_FILE );
	ARTagHelper artagHelper		( colorImgWidth, colorImgHeight, ARTAG_CONFIG_A3_FILE,	ARTAG_POS_A3_FILE );

	ExtrCalibrator extrCalibrator ( 6, KINECT_INTR_FILE, KINECT_DIST_FILE );
	// unsigned char * kinectImgBuf = new unsigned char[colorImgWidth * colorImgHeight * 3];

	// 
	// Initializing OpenNI Settings
	// 

	int ctlWndKey = -1;

	XnStatus nRetVal = XN_STATUS_OK;
	xn::EnumerationErrors errors;

	// 
	// Initialize Context Object
	// 
	
	nRetVal = g_Context.InitFromXmlFile ( CONFIG_XML_PATH, g_ScriptNode, &errors );
	if ( nRetVal == XN_STATUS_NO_NODE_PRESENT ) 
	{
		XnChar strError[1024];
		errors.ToString ( strError, 1024 );
		printf ( "XN_STATUS_NO_NODE_PRESENT:\n%s\n", strError );
		
		system ( "pause" );
		return ( nRetVal );
	}
	else if ( nRetVal != XN_STATUS_OK )
	{
		printf ( "Open FAILED:\n%s\n", xnGetStatusString ( nRetVal ) );	
	
		system ( "pause" );
		return ( nRetVal );
	}

	// 
	// Handle the Depth Generator Node.
	// 
	
	nRetVal = g_Context.FindExistingNode ( XN_NODE_TYPE_DEPTH, g_DepthGen );
	if ( nRetVal != XN_STATUS_OK )
	{
		printf ( "No Depth Node Exists! Please Check your XML.\n" );
		return ( nRetVal );
	}
	
	// 
	// Handle the Image Generator node
	// 
	
	nRetVal = g_Context.FindExistingNode ( XN_NODE_TYPE_IMAGE, g_ImageGen );
	if ( nRetVal != XN_STATUS_OK )
	{
		printf ( "No Image Node Exists! Please Check your XML.\n" );
		return ( nRetVal );
	}

	// g_DepthGen.GetAlternativeViewPointCap().SetViewPoint( g_ImageGen );

	g_DepthGen.GetMetaData ( g_DepthMD );
	g_ImageGen.GetMetaData ( g_ImageMD );

	assert ( g_ImageMD.PixelFormat() == XN_PIXEL_FORMAT_RGB24 );
	assert ( g_DepthMD.PixelFormat() == XN_PIXEL_FORMAT_GRAYSCALE_16_BIT );

	// 
	// Create OpenCV Showing Window and Related Data Structures
	// 

	cv::namedWindow ( IMAGE_WIN_NAME, CV_WINDOW_AUTOSIZE );
	cv::namedWindow ( DEPTH_WIN_NAME, CV_WINDOW_AUTOSIZE );

	cv::Mat depthImgMat  ( g_DepthMD.YRes(), g_DepthMD.XRes(), CV_16UC1 );
	cv::Mat depthImgShow ( g_DepthMD.YRes(), g_DepthMD.XRes(), CV_8UC3  );
	cv::Mat colorImgMat  ( g_ImageMD.YRes(), g_ImageMD.XRes(), CV_8UC3 );

#define ARTAG_DEBUG

#ifdef ARTAG_DEBUG
	
	cv::setMouseCallback ( IMAGE_WIN_NAME, ClickOnMouse, 0 );

#endif

	bool flipColor = true;

	// 
	// Start to Loop
	// 

	while ( ctlWndKey != ESC_KEY_VALUE ) 
	{
		// 
		// Try to Get New Frame From Kinect
		// 
	
		nRetVal = g_Context.WaitAnyUpdateAll ();

		g_DepthGen.GetMetaData ( g_DepthMD );
		g_ImageGen.GetMetaData ( g_ImageMD );

		assert ( g_DepthMD.FullXRes() == g_DepthMD.XRes() && g_DepthMD.FullYRes() == g_DepthMD.YRes() );
		assert ( g_ImageMD.FullXRes() == g_ImageMD.XRes() && g_ImageMD.FullYRes() == g_ImageMD.YRes() );

		GlobalUtility::CopyColorRawBufToCvMat8uc3 ( (const XnRGB24Pixel *)(g_ImageMD.Data()), colorImgMat );

#ifdef SHOW_DEPTH_WINDOW

		GlobalUtility::CopyDepthRawBufToCvMat16u  ( (const XnDepthPixel *)(g_DepthMD.Data()), depthImgMat );
		// GlobalUtility::ConvertDepthCvMat16uToYellowCvMat ( depthImgMat, depthImgShow );
		GlobalUtility::ConvertDepthCvMat16uToGrayCvMat ( depthImgMat, depthImgShow );

		cv::imshow ( DEPTH_WIN_NAME, depthImgShow );

#endif

		ctlWndKey = cvWaitKey ( 15 );

		if ( ctlWndKey == 'f' || ctlWndKey == 'F' ) 
		{
			artagHelper.Clear();

			artagHelper.FindMarkerCorners ( (unsigned char *)(g_ImageMD.Data()) );
			artagHelper.PrintMarkerCornersPos2dInCam ();
			extrCalibrator.ExtrCalib ( artagHelper );

			std::cout << "\nKinect Extr Matrix:" << std::endl;
			extrCalibrator.PrintMatrix ( extrCalibrator.GetMatrix ( ExtrCalibrator::EXTR ) );

			std::cout	<< "Reprojection ERROR = " 
						<< extrCalibrator.ComputeReprojectionErr ( artagHelper ) << std::endl
						// << extrCalibrator.ComputeReprojectionErr ( artagHelper.m_MarkerCornerPosCam2d, artagHelper.m_MarkerCornerPos3d, 24 ) << std::endl
						<< "Valid Marker Number = " << artagHelper.GetValidMarkerNumber() << std::endl
						<< std::endl;

			extrCalibrator.SaveMatrix ( ExtrCalibrator::EXTR, KINECT_EXTR_FILE );
		}
		if ( ctlWndKey == 's' || ctlWndKey == 'S' )
		{
			flipColor = !flipColor;
		}

		if ( flipColor ) { 
			cv::cvtColor ( colorImgMat, colorImgMat, CV_RGB2BGR );
		}

		artagHelper.DrawMarkersInCameraImage ( colorImgMat );
		cv::imshow ( IMAGE_WIN_NAME, colorImgMat );
	}

	g_Context.Release ();
	
	system	( "pause" );
	exit	( EXIT_SUCCESS );

}
Example #8
void KinectDataPub::spin(bool async)
{
  static bool onceThrough = false;

  if (!onceThrough){
    XnStatus nRetVal = XN_STATUS_OK;

    nRetVal = context.StartGeneratingAll();
    CHECK_RC(nRetVal, "StartGenerating");
    onceThrough = true;

    printf("Starting to run\n");
    if (needPose){
      printf("Assume calibration pose\n");
    }
  }

  XnUserID aUsers[MAX_NUM_USERS];
  XnUInt16 nUsers;
  SkeletonInfo skels[MAX_NUM_USERS];
  SensorMsg msg;
  xn::DepthMetaData depthMD;
  xn::SceneMetaData sceneMD;

  while(!xnOSWasKeyboardHit()){
    context.WaitOneUpdateAll(userGen);

    /**
     * nUsers is used both as input and output parameters.
     * When it's used as input parameter, it indicates the size of the user ID
     * array, while, when it's used as output parameter, it means the number
     * of users recognized. Therefore, it needs to be re-initialized in a loop.
     */
    nUsers = MAX_NUM_USERS;
    userGen.GetUsers(aUsers, nUsers);
    memset(&msg, 0, sizeof(SensorMsg));

    for (XnUInt16 i = 0; i < nUsers; ++i){
      if (userGen.GetSkeletonCap().IsTracking(aUsers[i])){
        // get the user's skeleton
        skels[i].user = aUsers[i];
        getSkeleton(skels[i]);
      }
      else
        skels[i].user = 0; // user is not tracked
    }

    depthGen.GetMetaData(depthMD); // depth map
    userGen.GetUserPixels(0, sceneMD); // labels

    msg.nUsers = nUsers; // num of users detected, either tracked or not
    msg.pSkels = skels;
    msg.pDepthMD = &depthMD;
    msg.pSceneMD = &sceneMD;

    for (int i = 0; i < subscribers.size(); ++i)
      subscribers[i]->notify(RAD_SENSOR_MSG, &msg);

    if (async)
      return;
  }
}
Example #9
/* The MATLAB mex function */
void mexFunction( int nlhs, mxArray *plhs[], int nrhs, const mxArray *prhs[] )
{
    XnUInt64 *MXadress;
    double *Pos;
    
    int Jdimsc[2];
    Jdimsc[0]=225; Jdimsc[1]=7;
    plhs[0] = mxCreateNumericArray(2, Jdimsc, mxDOUBLE_CLASS, mxREAL);
    Pos = mxGetPr(plhs[0]);
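    /*
     * Sketch of the output layout filled in below: Pos is a 225 x 7 column-major
     * MATLAB matrix with 15 joint rows per tracked user (up to 15 users).
     * Columns: user ID, joint confidence, world X, Y, Z, projective X, projective Y.
     */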
     
    if(nrhs==0)
    {
       printf("Open failed: Give Pointer to Kinect as input\n");
       mexErrMsgTxt("Kinect Error"); 
    }
        
    MXadress = (XnUInt64*)mxGetData(prhs[0]);
    if(MXadress[0]>0){ g_Context = ((xn::Context*) MXadress[0])[0]; }
    if(MXadress[2]>0)
    {
        g_DepthGenerator = ((xn::DepthGenerator*) MXadress[2])[0];
    }
    else
    {
        mexErrMsgTxt("No Depth Node in Kinect Context");
    }
    if(MXadress[4]>0)
    {
        g_UserGenerator = ((xn::UserGenerator*) MXadress[4])[0];
    }
    else
    {
        mexErrMsgTxt("No User Node in Kinect Context");
    }

    XnStatus nRetVal = XN_STATUS_OK;

    XnCallbackHandle hUserCallbacks, hCalibrationCallbacks, hPoseCallbacks;
    if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_SKELETON))
    {
        printf("Supplied user generator doesn't support skeleton\n");
        return;
    }
    g_UserGenerator.RegisterUserCallbacks(User_NewUser, User_LostUser, NULL, hUserCallbacks);
    g_UserGenerator.GetSkeletonCap().RegisterCalibrationCallbacks(UserCalibration_CalibrationStart, UserCalibration_CalibrationEnd, NULL, hCalibrationCallbacks);

    if (g_UserGenerator.GetSkeletonCap().NeedPoseForCalibration())
    {
        g_bNeedPose = TRUE;
        if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION))
        {
            printf("Pose required, but not supported\n");
            return;
        }
        g_UserGenerator.GetPoseDetectionCap().RegisterToPoseCallbacks(UserPose_PoseDetected, NULL, NULL, hPoseCallbacks);
        g_UserGenerator.GetSkeletonCap().GetCalibrationPose(g_strPose);
    }

    g_UserGenerator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);
 
    char strLabel[50] = "";
    XnUserID aUsers[15];
    XnUInt16 nUsers = 15;
    int r=0;
    xn::SceneMetaData sceneMD;
	xn::DepthMetaData depthMD;
	
    // Process the data
    g_DepthGenerator.GetMetaData(depthMD);
    g_UserGenerator.GetUserPixels(0, sceneMD);
    g_UserGenerator.GetUsers(aUsers, nUsers);
 	for (int i = 0; i < nUsers; ++i)
 	{
        if (g_UserGenerator.GetSkeletonCap().IsTracking(aUsers[i]))
		{
            //printf(strLabel, "%d - Looking for pose", aUsers[i]);
   
            XnSkeletonJointPosition joint[15];
            g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(aUsers[i],XN_SKEL_HEAD, joint[0]);
            g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(aUsers[i],XN_SKEL_NECK, joint[1]);
            g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(aUsers[i],XN_SKEL_LEFT_SHOULDER, joint[2]);
            g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(aUsers[i],XN_SKEL_LEFT_ELBOW, joint[3]);
            g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(aUsers[i],XN_SKEL_LEFT_HAND, joint[4]);
            g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(aUsers[i],XN_SKEL_RIGHT_SHOULDER, joint[5]);
            g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(aUsers[i],XN_SKEL_RIGHT_ELBOW, joint[6]);
            g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(aUsers[i],XN_SKEL_RIGHT_HAND, joint[7]);
            g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(aUsers[i],XN_SKEL_TORSO, joint[8]);
            g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(aUsers[i],XN_SKEL_LEFT_HIP, joint[9]);
            g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(aUsers[i],XN_SKEL_LEFT_KNEE, joint[10]);
            g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(aUsers[i],XN_SKEL_LEFT_FOOT, joint[11]);
            g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(aUsers[i],XN_SKEL_RIGHT_HIP, joint[12]);
            g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(aUsers[i],XN_SKEL_RIGHT_KNEE, joint[13]);
            g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(aUsers[i],XN_SKEL_RIGHT_FOOT, joint[14]);

            XnPoint3D pt[1];
            for(int j=0; j<15; j++)
            {
                Pos[j            +r]=aUsers[i];
                Pos[j+Jdimsc[0]  +r]=joint[j].fConfidence;
                Pos[j+Jdimsc[0]*2+r]=joint[j].position.X;
                Pos[j+Jdimsc[0]*3+r]=joint[j].position.Y;
                Pos[j+Jdimsc[0]*4+r]=joint[j].position.Z;
                pt[0] = joint[j].position;
                g_DepthGenerator.ConvertRealWorldToProjective(1, pt, pt);
                Pos[j+Jdimsc[0]*5+r]=pt[0].X;
                Pos[j+Jdimsc[0]*6+r]=pt[0].Y;
            }        
            r+=15;
        }
     }
            
}
Example #10
	void matrixCalc(void *outputs)
	{
		TML::Matrix out1(outputs, 0);
		TML::Matrix out2(outputs, 1);
		TML::Matrix out3(outputs, 2);
		TML::Matrix out4(outputs, 3);
		
		xn::DepthMetaData depthMD;
		xn::SceneMetaData sceneMD;
		xn::ImageMetaData imageMD;
		
		depth.GetMetaData(depthMD);
		user.GetUserPixels(0, sceneMD);
		image.GetMetaData(imageMD);
		
		context.WaitNoneUpdateAll();
		
		t_jit_matrix_info tmi;
		memset(&tmi, 0, sizeof(tmi));
		tmi.dimcount = 2;
		tmi.planecount = 1;
		tmi.dimstride[0] = 4;
		tmi.dimstride[1] = depthMD.XRes()*4;
		int width = tmi.dim[0] = depthMD.XRes();
		int height = tmi.dim[1] = depthMD.YRes();
		tmi.type = _jit_sym_float32;
		
		out1.resizeTo(&tmi);
		
		tmi.planecount = 1;
		tmi.dimstride[0] = 1;
		tmi.dimstride[1] = depthMD.XRes();
		tmi.type = _jit_sym_char;
		out2.resizeTo(&tmi);
		
		tmi.planecount = 4;
		tmi.dimstride[0] = 4;
		tmi.dimstride[1] = depthMD.XRes()*4;
		tmi.type = _jit_sym_char;
		out3.resizeTo(&tmi);
		
		const XnDepthPixel* pDepth = depthMD.Data();
		float *depthData = (float*)out1.data();
		
		//Copy depth data
		int x,y;
		for (y=0; y<height; y++)
		{
			for (x=0; x<width; x++)
			{
				depthData[0] = (float)pDepth[0]/powf(2, 15);
				
				depthData++;
				pDepth++;
			}
		}
		
		//Get the users
		unsigned char *userData = (unsigned char*)out2.data();
		const XnLabel* pLabels = sceneMD.Data();
		for (y=0; y<height; y++)
		{
			for (x=0; x<width; x++)
			{
				userData[0] = pLabels[0];
				
				userData++;
				pLabels++;
			}
		}
		
		//Get the colors
		const XnRGB24Pixel* pPixels = imageMD.RGB24Data();
		unsigned char *pixData = (unsigned char*)out3.data();
		for (y=0; y<height; y++)
		{
			for (x=0; x<width; x++)
			{
				pixData[0] = 0;
				pixData[1] = pPixels[0].nRed;
				pixData[2] = pPixels[0].nGreen;
				pixData[3] = pPixels[0].nBlue;
				
				pixData+=4;
				pPixels++;
			}
		}
		
		//For all the users -- output the joint info...
		XnUserID aUsers[15];
		XnUInt16 nUsers = 15;
		user.GetUsers(aUsers, nUsers);
		
		int rUsers = 0;
		
		xn::SkeletonCapability sc = user.GetSkeletonCap();
		
		int i;
		for (i=0; i<nUsers; i++)
		{
			if (user.GetSkeletonCap().IsTracking(aUsers[i]))
				rUsers++;
		}
		
		tmi.dimcount = 2;
		tmi.planecount = 3;
		tmi.dimstride[0] = 3*4;
		tmi.dimstride[1] = 24*3*4;
		tmi.dim[0] = 24;
		tmi.dim[1] = rUsers;
		tmi.type = _jit_sym_float32;
		out4.resizeTo(&tmi);
		
		
		float *sData = (float*)out4.data();
		
			
		if (rUsers == 0)
		{
			int n;
			for (n=0; n<24; n++)
			{					
				sData[0] = 0;
				sData[1] = 0;
				sData[2] = 0;
				
				sData+=3;
			}
		}
		else
		{
			for (i=0; i<nUsers; i++)
			{
				if (user.GetSkeletonCap().IsTracking(aUsers[i]))
				{
					int n;
					for (n=0; n<24; n++)
					{
						XnSkeletonJointPosition jp;
						user.GetSkeletonCap().GetSkeletonJointPosition(aUsers[i], (XnSkeletonJoint)(n+1), jp);
						
						sData[0] = (1280 - jp.position.X) / 2560;
						sData[1] = (1280 - jp.position.Y) / 2560;
						sData[2] = jp.position.Z * 7.8125 / 10000;
						
//						if (n == 0)
//						{
//							post("%f %f %f\n", sData[0], sData[1], sData[2]);
//						}
						
						sData+=3;
					}
				}
			}
		}
		//post("%i\n", rUsers);
	}
Example #11
void start_kinect() {
    XnStatus nRetVal = XN_STATUS_OK;
    xn::EnumerationErrors errors;
    UsersCount = 0;

    const char *fn = NULL;
    if    (fileExists(SAMPLE_XML_PATH)) fn = SAMPLE_XML_PATH;
    else if (fileExists(SAMPLE_XML_PATH_LOCAL)) fn = SAMPLE_XML_PATH_LOCAL;
    else {
        printf("Could not find '%s' nor '%s'. Aborting.\n" , SAMPLE_XML_PATH, SAMPLE_XML_PATH_LOCAL);
        //return XN_STATUS_ERROR;
    }
    printf("Reading config from: '%s'\n", fn);

    nRetVal = g_Context.InitFromXmlFile(fn, g_scriptNode, &errors);
    if (nRetVal == XN_STATUS_NO_NODE_PRESENT)
    {
        XnChar strError[1024];
        errors.ToString(strError, 1024);
        printf("%s\n", strError);
        //return (nRetVal);
    }
    else if (nRetVal != XN_STATUS_OK)
    {
        printf("Open failed: %s\n", xnGetStatusString(nRetVal));
        //return (nRetVal);
    }

    nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_depth);      
    CHECK_RC(nRetVal,"No depth");
    
	  nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_IMAGE, g_image);      
    CHECK_RC(nRetVal,"No image");

    nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_USER, g_UserGenerator);
    if (nRetVal != XN_STATUS_OK)
    {
        nRetVal = g_UserGenerator.Create(g_Context);
        CHECK_RC(nRetVal, "Find user generator");
    }

    XnCallbackHandle hUserCallbacks, hCalibrationStart, hCalibrationComplete, hPoseDetected;
    if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_SKELETON))
    {
        printf("Supplied user generator doesn't support skeleton\n");
        //return 1;
    }
    nRetVal = g_UserGenerator.RegisterUserCallbacks(User_NewUser, User_LostUser, NULL, hUserCallbacks);
    CHECK_RC(nRetVal, "Register to user callbacks");
    nRetVal = g_UserGenerator.GetSkeletonCap().RegisterToCalibrationStart(UserCalibration_CalibrationStart, NULL, hCalibrationStart);
    CHECK_RC(nRetVal, "Register to calibration start");
    nRetVal = g_UserGenerator.GetSkeletonCap().RegisterToCalibrationComplete(UserCalibration_CalibrationComplete, NULL, hCalibrationComplete);
    CHECK_RC(nRetVal, "Register to calibration complete");

    if (g_UserGenerator.GetSkeletonCap().NeedPoseForCalibration())
    {
        g_bNeedPose = TRUE;
        if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION))
        {
            printf("Pose required, but not supported\n");
            //return 1;
        }
        nRetVal = g_UserGenerator.GetPoseDetectionCap().RegisterToPoseDetected(UserPose_PoseDetected, NULL, hPoseDetected);
        CHECK_RC(nRetVal, "Register to Pose Detected");
        g_UserGenerator.GetSkeletonCap().GetCalibrationPose(g_strPose);
    }

    g_UserGenerator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);

    nRetVal = g_Context.StartGeneratingAll();
    CHECK_RC(nRetVal, "StartGenerating");

    XnUserID aUsers[MAX_NUM_USERS];
    XnUInt16 nUsers;

    XnSkeletonJointTransformation anyjoint;

    printf("Starting to run\n");
    if(g_bNeedPose)
    {
        printf("Assume calibration pose\n");
    }
    XnUInt32 epochTime = 0;
    while (!xnOSWasKeyboardHit())
    {
        g_Context.WaitOneUpdateAll(g_UserGenerator);
        // print the torso information for the first user already tracking
        nUsers=MAX_NUM_USERS;
        g_UserGenerator.GetUsers(aUsers, nUsers);
        int numTracked=0;
        int userToPrint=-1;

        WriteLock w_lock(myLock);
        pDepthMap = g_depth.GetDepthMap();
        pPixelMap = g_image.GetRGB24ImageMap();

        g_depth.GetMetaData(g_depthMD);
        g_image.GetMetaData(g_imageMD);
        pPixelPoint = g_imageMD.RGB24Data();


        for(XnUInt16 i=0; i<nUsers; i++) {
    			if(g_UserGenerator.GetSkeletonCap().IsTracking(aUsers[i])==FALSE)
    				continue;
    			{
    				
    				
    				/* Writing all new movements into structure*/
    				/* Head */
    				g_UserGenerator.GetSkeletonCap().GetSkeletonJoint(aUsers[i],XN_SKEL_HEAD,anyjoint);
    				Skeletons[i]["Head"]["X"] = anyjoint.position.position.X;
    				Skeletons[i]["Head"]["Y"] = anyjoint.position.position.Y;
    				Skeletons[i]["Head"]["Z"] = anyjoint.position.position.Z;
    				/* Neck */
    				g_UserGenerator.GetSkeletonCap().GetSkeletonJoint(aUsers[i],XN_SKEL_NECK,anyjoint);
    				Skeletons[i]["Neck"]["X"] = anyjoint.position.position.X;
    				Skeletons[i]["Neck"]["Y"] = anyjoint.position.position.Y;
    				Skeletons[i]["Neck"]["Z"] = anyjoint.position.position.Z;
    				/* Left Shoulder */
    				g_UserGenerator.GetSkeletonCap().GetSkeletonJoint(aUsers[i],XN_SKEL_LEFT_SHOULDER,anyjoint);
    				Skeletons[i]["LeftShoulder"]["X"] = anyjoint.position.position.X;
    				Skeletons[i]["LeftShoulder"]["Y"] = anyjoint.position.position.Y;
    				Skeletons[i]["LeftShoulder"]["Z"] = anyjoint.position.position.Z;
    				/* Right Shoulder */
    				g_UserGenerator.GetSkeletonCap().GetSkeletonJoint(aUsers[i],XN_SKEL_RIGHT_SHOULDER,anyjoint);
    				Skeletons[i]["RightShoulder"]["X"] = anyjoint.position.position.X;
    				Skeletons[i]["RightShoulder"]["Y"] = anyjoint.position.position.Y;
    				Skeletons[i]["RightShoulder"]["Z"] = anyjoint.position.position.Z;
    				/* Torso */
    				g_UserGenerator.GetSkeletonCap().GetSkeletonJoint(aUsers[i],XN_SKEL_TORSO,anyjoint);
    				Skeletons[i]["Torso"]["X"] = anyjoint.position.position.X;
    				Skeletons[i]["Torso"]["Y"] = anyjoint.position.position.Y;
    				Skeletons[i]["Torso"]["Z"] = anyjoint.position.position.Z;
    				/* Left Elbow */
    				g_UserGenerator.GetSkeletonCap().GetSkeletonJoint(aUsers[i],XN_SKEL_LEFT_ELBOW,anyjoint);
    				Skeletons[i]["LeftElbow"]["X"] = anyjoint.position.position.X;
    				Skeletons[i]["LeftElbow"]["Y"] = anyjoint.position.position.Y;
    				Skeletons[i]["LeftElbow"]["Z"] = anyjoint.position.position.Z;
    				/* Right Elbow */
    				g_UserGenerator.GetSkeletonCap().GetSkeletonJoint(aUsers[i],XN_SKEL_RIGHT_ELBOW,anyjoint);
    				Skeletons[i]["RightElbow"]["X"] = anyjoint.position.position.X;
    				Skeletons[i]["RightElbow"]["Y"] = anyjoint.position.position.Y;
    				Skeletons[i]["RightElbow"]["Z"] = anyjoint.position.position.Z;
    				/* Left Hip */
    				g_UserGenerator.GetSkeletonCap().GetSkeletonJoint(aUsers[i],XN_SKEL_LEFT_HIP,anyjoint);
    				Skeletons[i]["LeftHip"]["X"] = anyjoint.position.position.X;
    				Skeletons[i]["LeftHip"]["Y"] = anyjoint.position.position.Y;
    				Skeletons[i]["LeftHip"]["Z"] = anyjoint.position.position.Z;
    				/* Right Hip */
    				g_UserGenerator.GetSkeletonCap().GetSkeletonJoint(aUsers[i],XN_SKEL_RIGHT_HIP,anyjoint);
    				Skeletons[i]["RightHip"]["X"] = anyjoint.position.position.X;
    				Skeletons[i]["RightHip"]["Y"] = anyjoint.position.position.Y;
    				Skeletons[i]["RightHip"]["Z"] = anyjoint.position.position.Z;
    				/* Left Hand */
    				g_UserGenerator.GetSkeletonCap().GetSkeletonJoint(aUsers[i],XN_SKEL_LEFT_HAND,anyjoint);
    				Skeletons[i]["LeftHand"]["X"] = anyjoint.position.position.X;
    				Skeletons[i]["LeftHand"]["Y"] = anyjoint.position.position.Y;
    				Skeletons[i]["LeftHand"]["Z"] = anyjoint.position.position.Z;
    				/* Right Hand */
    				g_UserGenerator.GetSkeletonCap().GetSkeletonJoint(aUsers[i],XN_SKEL_RIGHT_HAND,anyjoint);
    				Skeletons[i]["RightHand"]["X"] = anyjoint.position.position.X;
    				Skeletons[i]["RightHand"]["Y"] = anyjoint.position.position.Y;
    				Skeletons[i]["RightHand"]["Z"] = anyjoint.position.position.Z;
    				/* Left Knee */
    				g_UserGenerator.GetSkeletonCap().GetSkeletonJoint(aUsers[i],XN_SKEL_LEFT_KNEE,anyjoint);
    				Skeletons[i]["LeftKnee"]["X"] = anyjoint.position.position.X;
    				Skeletons[i]["LeftKnee"]["Y"] = anyjoint.position.position.Y;
    				Skeletons[i]["LeftKnee"]["Z"] = anyjoint.position.position.Z;
    				/* Right Knee */
    				g_UserGenerator.GetSkeletonCap().GetSkeletonJoint(aUsers[i],XN_SKEL_RIGHT_KNEE,anyjoint);
    				Skeletons[i]["RightKnee"]["X"] = anyjoint.position.position.X;
    				Skeletons[i]["RightKnee"]["Y"] = anyjoint.position.position.Y;
    				Skeletons[i]["RightKnee"]["Z"] = anyjoint.position.position.Z;
    				/* Left Foot */
    				g_UserGenerator.GetSkeletonCap().GetSkeletonJoint(aUsers[i],XN_SKEL_LEFT_FOOT,anyjoint);
    				Skeletons[i]["LeftFoot"]["X"] = anyjoint.position.position.X;
    				Skeletons[i]["LeftFoot"]["Y"] = anyjoint.position.position.Y;
    				Skeletons[i]["LeftFoot"]["Z"] = anyjoint.position.position.Z;
    				/* Right Foot */
    				g_UserGenerator.GetSkeletonCap().GetSkeletonJoint(aUsers[i],XN_SKEL_RIGHT_FOOT,anyjoint);
    				Skeletons[i]["RightFoot"]["X"] = anyjoint.position.position.X;
    				Skeletons[i]["RightFoot"]["Y"] = anyjoint.position.position.Y;
    				Skeletons[i]["RightFoot"]["Z"] = anyjoint.position.position.Z;
    
    				/*printf("user %d: head at (%6.2f,%6.2f,%6.2f)\n",aUsers[i],
                                                                      Skeletons[i]["Head"]["X"],
                                                                      Skeletons[i]["Head"]["Y"],
                                                                      Skeletons[i]["Head"]["Z"]);*/
    			}				
        }
        
    }
    g_scriptNode.Release();
    g_depth.Release();
    g_image.Release();
    g_UserGenerator.Release();
    g_Context.Release();
}
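A sketch that collapses the fifteen near-identical joint blocks above into a table-driven loop, under the same assumptions as the example (the g_UserGenerator and Skeletons globals; readSkeleton is a hypothetical helper name):

    static const struct { XnSkeletonJoint joint; const char* name; } kJoints[] = {
        { XN_SKEL_HEAD, "Head" },                  { XN_SKEL_NECK, "Neck" },
        { XN_SKEL_LEFT_SHOULDER, "LeftShoulder" }, { XN_SKEL_RIGHT_SHOULDER, "RightShoulder" },
        { XN_SKEL_TORSO, "Torso" },
        { XN_SKEL_LEFT_ELBOW, "LeftElbow" },       { XN_SKEL_RIGHT_ELBOW, "RightElbow" },
        { XN_SKEL_LEFT_HIP, "LeftHip" },           { XN_SKEL_RIGHT_HIP, "RightHip" },
        { XN_SKEL_LEFT_HAND, "LeftHand" },         { XN_SKEL_RIGHT_HAND, "RightHand" },
        { XN_SKEL_LEFT_KNEE, "LeftKnee" },         { XN_SKEL_RIGHT_KNEE, "RightKnee" },
        { XN_SKEL_LEFT_FOOT, "LeftFoot" },         { XN_SKEL_RIGHT_FOOT, "RightFoot" },
    };

    // Read every joint of one tracked user into the Skeletons structure.
    static void readSkeleton(XnUserID user, int slot)
    {
        XnSkeletonJointTransformation t;
        for (size_t j = 0; j < sizeof(kJoints) / sizeof(kJoints[0]); ++j) {
            g_UserGenerator.GetSkeletonCap().GetSkeletonJoint(user, kJoints[j].joint, t);
            Skeletons[slot][kJoints[j].name]["X"] = t.position.position.X;
            Skeletons[slot][kJoints[j].name]["Y"] = t.position.position.Y;
            Skeletons[slot][kJoints[j].name]["Z"] = t.position.position.Z;
        }
    }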
Example #12
bool initialize(){

	std::cout << "initializing kinect... " << endl;

#ifdef USE_MS_SKD

	int iSensorCount = 0;
    if ( NuiGetSensorCount(&iSensorCount) < 0 )
		return false;

    // Look at each Kinect sensor
    for (int i = 0; i < iSensorCount; ++i)
    {
        // Create the sensor so we can check status, if we can't create it, move on to the next
        if ( NuiCreateSensorByIndex(i, &g_pNuiSensor) < 0 ) continue;
      
		// Get the status of the sensor, and if connected, then we can initialize it
        if( 0 == g_pNuiSensor->NuiStatus() ){
            // the sensor was created directly into g_pNuiSensor above, so just keep it
            break;
        }

        // This sensor wasn't OK, so release it since we're not using it
        g_pNuiSensor->Release();
    }

    if (NULL != g_pNuiSensor)
    {
        // Initialize the Kinect and specify that we'll be using depth
        if ( g_pNuiSensor->NuiInitialize(NUI_INITIALIZE_FLAG_USES_DEPTH) >= 0 )
        {
            // Create an event that will be signaled when depth data is available
            g_hNextDepthFrameEvent = CreateEvent(NULL, TRUE, FALSE, NULL);

            // Open a depth image stream to receive depth frames
            g_pNuiSensor->NuiImageStreamOpen(
                NUI_IMAGE_TYPE_DEPTH,
                NUI_IMAGE_RESOLUTION_640x480,
                0,
                2,
                g_hNextDepthFrameEvent,
                &g_pDepthStreamHandle);
        }
    }
	else
        return false;
	
#endif

#ifdef USE_OPENNI

	// Initialize context object
	g_RetVal = g_Context.Init();

	g_RetVal = g_DepthGenerator.Create(g_Context);
	if (g_RetVal != XN_STATUS_OK)
		printf("Failed creating DEPTH generator %s\n", xnGetStatusString(g_RetVal));

	XnMapOutputMode outputMode;
	outputMode.nXRes = g_im_w;
	outputMode.nYRes = g_im_h;
	outputMode.nFPS = g_fps;
	g_RetVal = g_DepthGenerator.SetMapOutputMode(outputMode);
	if (g_RetVal != XN_STATUS_OK){
		printf("Failed setting the DEPTH output mode %s\n", xnGetStatusString(g_RetVal));
		return false;
	}

	g_RetVal = g_Context.StartGeneratingAll();
	if (g_RetVal != XN_STATUS_OK){
		printf("Failed starting generating all %s\n", xnGetStatusString(g_RetVal));
		return false;
	}
#endif

#ifdef USE_LIBFREENECT
  depth_mid = (uint16_t*)malloc(640*480*2);
  depth_front = (uint16_t*)malloc(640*480*2);

	if (freenect_init(&f_ctx, NULL) < 0) {
		printf("freenect_init() failed\n");
		return false; // freenect_init failed
	}
	
	freenect_set_log_level(f_ctx, FREENECT_LOG_ERROR);
	freenect_select_subdevices(f_ctx, (freenect_device_flags)(FREENECT_DEVICE_MOTOR | FREENECT_DEVICE_CAMERA));
	
	int nr_devices = freenect_num_devices (f_ctx);
	printf ("Number of devices found: %d\n", nr_devices);
	
	int user_device_number = 0;
	
	if (nr_devices < 1) {
		freenect_shutdown(f_ctx);
		return false;
	}
	
	if (freenect_open_device(f_ctx, &f_dev, user_device_number) < 0) {
		printf("Could not open device\n");
		freenect_shutdown(f_ctx);
		return false;
	}
	
	bool res = pthread_create(&freenect_thread, NULL, freenect_threadfunc, NULL);
	if (res) {
		printf("pthread_create failed\n");
		freenect_shutdown(f_ctx);
		return false;
	}
	freenect_raw_tilt_state state;
	freenect_get_tilt_status(&state);
	
	freenect_angle = freenect_get_tilt_degs(&state);
	
#endif
	
	g_im3D.create(g_im_h,g_im_w,CV_32FC3);
	g_imD.create(g_im_h,g_im_w,CV_16UC1);

	return true;
}
Example #13
int main ( int argc, char * argv[] )
{
	// 
	// Initialize OpenNI Settings
	// 

	XnStatus nRetVal = XN_STATUS_OK;
	xn::ScriptNode scriptNode;
	xn::EnumerationErrors errors;

	// 
	// Initialize Context Object
	// 

	nRetVal = g_Context.InitFromXmlFile ( CONFIG_XML_PATH, scriptNode, &errors );
	
	if ( nRetVal == XN_STATUS_NO_NODE_PRESENT ) {
		XnChar strError[1024];
		errors.ToString(strError, 1024);
		printf ( "XN_STATUS_NO_NODE_PRESENT:\n%s\n", strError );
		system ( "pause" );

		return ( nRetVal );
	}
	else if ( nRetVal != XN_STATUS_OK ) {
		printf ( "Open failed: %s\n", xnGetStatusString(nRetVal) );	
		system ( "pause" );

		return ( nRetVal );
	}

	// 
	// Handle Image & Depth Generator Node
	// 

	bool colorFlag = true;
	bool depthFlag = true;

	nRetVal = g_Context.FindExistingNode ( XN_NODE_TYPE_DEPTH, g_DepthGen );
	if ( nRetVal != XN_STATUS_OK ) {
		printf("No depth node exists!\n");
		depthFlag = false;
	}
	nRetVal = g_Context.FindExistingNode ( XN_NODE_TYPE_IMAGE, g_ImageGen );
	if ( nRetVal != XN_STATUS_OK ) {
		printf("No image node exists!\n");
		colorFlag = false;
	}

	// g_DepthGen.GetAlternativeViewPointCap().SetViewPoint( g_ImageGen );

	if ( depthFlag ) {
		g_DepthGen.GetMetaData ( g_DepthMD );
		assert ( g_DepthMD.PixelFormat() == XN_PIXEL_FORMAT_GRAYSCALE_16_BIT );
	}
	if ( colorFlag ) {
		g_ImageGen.GetMetaData ( g_ImageMD );
		assert ( g_ImageMD.PixelFormat() == XN_PIXEL_FORMAT_RGB24 );
	}

	g_DepthImgShow = cv::Mat ( g_DepthMD.YRes(), g_DepthMD.XRes(), CV_8UC1  );
	g_DepthImgMat  = cv::Mat ( g_DepthMD.YRes(), g_DepthMD.XRes(), CV_16UC1 );
	g_ColorImgMat  = cv::Mat ( g_ImageMD.YRes(), g_ImageMD.XRes(), CV_8UC3  );
	
	// 
	// Start to Loop
	// 

	bool flipColor = true;
	int ctlWndKey = -1;

	g_StartTickCount = GetTickCount();
	g_HeadTrackingFrameCount = 0;
	
	while ( ctlWndKey != ESC_KEY_VALUE ) 
	{
		nRetVal = g_Context.WaitOneUpdateAll ( g_DepthGen );
		// nRetVal = g_Context.WaitAnyUpdateAll();

#ifdef HANDLING_IMAGE_DATA

		if ( colorFlag ) 
		{
			g_ImageGen.GetMetaData ( g_ImageMD );

			assert ( g_ImageMD.FullXRes() == g_ImageMD.XRes() );
			assert ( g_ImageMD.FullYRes() == g_ImageMD.YRes() );

			GlobalUtility::CopyColorRawBufToCvMat8uc3 ( (const XnRGB24Pixel *)(g_ImageMD.Data()), g_ColorImgMat );
	
			if ( ctlWndKey == 's' || ctlWndKey == 'S' ) {												// Switch
				flipColor = !flipColor;
			}
			if ( flipColor ) {
				cv::cvtColor ( g_ColorImgMat, g_ColorImgMat, CV_RGB2BGR );
			}

			cv::namedWindow ( IMAGE_WIN_NAME, CV_WINDOW_AUTOSIZE );
			cv::imshow ( IMAGE_WIN_NAME, g_ColorImgMat );
		}

#endif

#ifdef HANDLING_DEPTH_DATA

		if ( depthFlag ) 
		{
			g_DepthGen.GetMetaData(g_DepthMD);
		
			// assert ( g_DepthMD.FullXRes() == g_DepthMD.XRes() );
			// assert ( g_DepthMD.FullYRes() == g_DepthMD.YRes() );

			GlobalUtility::CopyDepthRawBufToCvMat16u ( (const XnDepthPixel *)(g_DepthMD.Data()), g_DepthImgMat );
			GlobalUtility::ConvertDepthCvMat16uToGrayCvMat ( g_DepthImgMat, g_DepthImgShow );

			/*
			cv::putText(colorImgMat, 
						GlobalUtility::DoubleToString(g_ImageMD.FPS()) + " FPS", 
						cv::Point(10, 450), 
						cv::FONT_ITALIC, 
						0.7, 
						cv::Scalar(255, 255, 255, 0),
						2,
						8,
						false);
						*/

			cv::namedWindow ( DEPTH_WIN_NAME, CV_WINDOW_AUTOSIZE );
			cv::imshow ( DEPTH_WIN_NAME, g_DepthImgShow );
		}

#endif

		XnFieldOfView fov;
		g_DepthGen.GetFieldOfView( fov );

		std::cout	<< "HFov = " << fov.fHFOV << std::endl
					<< "VFov = " << fov.fVFOV << std::endl;

		ctlWndKey = cvWaitKey ( 5 );

		g_HeadTrackingFrameCount++;
		g_CurrTickCount = GetTickCount();
		std::cout	<< "FPS = " 
					<< 1000 / ( ( double )( g_CurrTickCount - g_StartTickCount ) / ( double )( g_HeadTrackingFrameCount ) ) 
					<< std::endl;
	}

	g_Context.Release ();

	exit ( EXIT_SUCCESS );
}
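The field of view printed inside the loop above can be turned into pinhole focal lengths in pixels; a small sketch under the example's assumptions (fov and g_DepthMD as used there, XnFieldOfView angles in radians):

	#include <cmath>

	// Sketch: focal length in pixels from a field-of-view angle (radians) and a resolution.
	inline double focalLengthPx(double fovRadians, int resolutionPx)
	{
		return resolutionPx / (2.0 * std::tan(fovRadians / 2.0));
	}
	// e.g. double fx = focalLengthPx(fov.fHFOV, g_DepthMD.XRes());
	//      double fy = focalLengthPx(fov.fVFOV, g_DepthMD.YRes());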
Example #14
int main() {

	const unsigned int nBackgroundTrain = 30;
	const unsigned short touchDepthMin = 10;
	const unsigned short touchDepthMax = 20;
	const unsigned int touchMinArea = 50;

	const bool localClientMode = true; 					// connect to a local client

	const double debugFrameMaxDepth = 4000; // maximal distance (in millimeters) for 8 bit debug depth frame quantization
	const char* windowName = "Debug";
	const Scalar debugColor0(0,0,128);
	const Scalar debugColor1(255,0,0);
	const Scalar debugColor2(255,255,255);

	int xMin = 110;
	int xMax = 560;
	int yMin = 120;
	int yMax = 320;

	Mat1s depth(480, 640); // 16 bit depth (in millimeters)
	Mat1b depth8(480, 640); // 8 bit depth
	Mat3b rgb(480, 640); // 8 bit RGB image

	Mat3b debug(480, 640); // debug visualization

	Mat1s foreground(640, 480);
	Mat1b foreground8(640, 480);

	Mat1b touch(640, 480); // touch mask

	Mat1s background(480, 640);
	vector<Mat1s> buffer(nBackgroundTrain);

	CHECK_RC(initOpenNI("niConfig.xml"), "initOpenNI");

	// TUIO server object
	TuioServer* tuio;
	if (localClientMode) {
		tuio = new TuioServer();
	} else {
		tuio = new TuioServer("192.168.0.1",3333,false);
	}
	TuioTime time;

	// create some sliders
	namedWindow(windowName);
	createTrackbar("xMin", windowName, &xMin, 640);
	createTrackbar("xMax", windowName, &xMax, 640);
	createTrackbar("yMin", windowName, &yMin, 480);
	createTrackbar("yMax", windowName, &yMax, 480);

	// create background model (average depth)
	for (unsigned int i=0; i<nBackgroundTrain; i++) {
		CHECK_RC(xnContext.WaitAndUpdateAll(), "xnContext.WaitAndUpdateAll()");
		depth.data = (uchar*) xnDepthGenerator.GetDepthMap();
		buffer[i] = depth;
	}
	average(buffer, background);

	while ( waitKey(1) != 27 ) {
		// read available data
		xnContext.WaitAndUpdateAll();

		// update 16 bit depth matrix
		depth.data = (uchar*) xnDepthGenerator.GetDepthMap();
		//xnImgeGenertor.GetGrayscale8ImageMap()



		// update rgb image
		//rgb.data = (uchar*) xnImgeGenertor.GetRGB24ImageMap(); // segmentation fault here
		//cvtColor(rgb, rgb, CV_RGB2BGR);

		// extract foreground by simple subtraction of very basic background model
		foreground = background - depth;

		// find touch mask by thresholding (points that are close to background = touch points)
		touch = (foreground > touchDepthMin) & (foreground < touchDepthMax);

		// extract ROI
		Rect roi(xMin, yMin, xMax - xMin, yMax - yMin);
		Mat touchRoi = touch(roi);

		// find touch points
		vector< vector<Point2i> > contours;
		vector<Point2f> touchPoints;
		findContours(touchRoi, contours, CV_RETR_LIST, CV_CHAIN_APPROX_SIMPLE, Point2i(xMin, yMin));
		for (unsigned int i=0; i<contours.size(); i++) {
			Mat contourMat(contours[i]);
			// find touch points by area thresholding
			if ( contourArea(contourMat) > touchMinArea ) {
				Scalar center = mean(contourMat);
				Point2i touchPoint(center[0], center[1]);
				touchPoints.push_back(touchPoint);
			}
		}

		// send TUIO cursors
		time = TuioTime::getSessionTime();
		tuio->initFrame(time);

		for (unsigned int i=0; i<touchPoints.size(); i++) { // touch points
			float cursorX = (touchPoints[i].x - xMin) / (xMax - xMin);
			float cursorY = 1 - (touchPoints[i].y - yMin)/(yMax - yMin);
			TuioCursor* cursor = tuio->getClosestTuioCursor(cursorX,cursorY);
			// TODO improve tracking (don't move cursors away, that might be closer to another touch point)
			if (cursor == NULL || cursor->getTuioTime() == time) {
				tuio->addTuioCursor(cursorX,cursorY);
			} else {
				tuio->updateTuioCursor(cursor, cursorX, cursorY);
			}
		}

		tuio->stopUntouchedMovingCursors();
		tuio->removeUntouchedStoppedCursors();
		tuio->commitFrame();

		// draw debug frame
		depth.convertTo(depth8, CV_8U, 255 / debugFrameMaxDepth); // render depth to debug frame
		cvtColor(depth8, debug, CV_GRAY2BGR);
		debug.setTo(debugColor0, touch);  // touch mask
		rectangle(debug, roi, debugColor1, 2); // surface boundaries
		for (unsigned int i=0; i<touchPoints.size(); i++) { // touch points
			circle(debug, touchPoints[i], 5, debugColor2, CV_FILLED);
		}

		// render debug frame (with sliders)
		imshow(windowName, debug);
		//imshow("image", rgb);
	}

	return 0;
}
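average() is used above to build the background model but is not part of this listing; a minimal sketch of a per-pixel mean over the buffered depth frames, consistent with how it is called here (an assumption about its actual implementation; same using-directives as the example):

	// Sketch: plausible implementation of average(buffer, background) as used above.
	void average(const vector<Mat1s>& frames, Mat1s& mean)
	{
		Mat acc(frames[0].size(), CV_64FC1, Scalar(0));
		for (unsigned int i = 0; i < frames.size(); i++) {
			Mat d;
			frames[i].convertTo(d, CV_64FC1);
			acc += d;                          // accumulate in double precision
		}
		acc = acc / (double)frames.size();     // per-pixel mean
		acc.convertTo(mean, CV_16SC1);         // back to the 16-bit background model
	}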
Example #15
// this function is called each frame
void glutDisplay (void)
{

	if(gScene == NULL) return;
	
	gScene->simulate(1.0f/30.0f);
	glClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT);

	// Projection matrix
	glMatrixMode(GL_PROJECTION);
	glLoadIdentity();
	gluPerspective(100.0f, (float)glutGet(GLUT_WINDOW_WIDTH)/(float)glutGet(GLUT_WINDOW_HEIGHT), 1.0f, 10000.0f); // viewpoint position

	glMatrixMode(GL_MODELVIEW);
	glLoadIdentity();
	gluLookAt(gEye.x, gEye.y, gEye.z, gEye.x + gDir.x, gEye.y + gDir.y, gEye.z + gDir.z, 0.0f, 1.0f, 0.0f);
	
	xn::SceneMetaData sceneMD;
	xn::DepthMetaData depthMD;
	g_DepthGenerator.GetMetaData(depthMD);

	if (!g_bPause)
	{
		// Read next available data
		g_Context.WaitAndUpdateAll();
	}

	for(int i=0;i<2;i++){
		for(int j=0;j<15;j++){
			oldjpoint[i][j]=jpoint[i][j];
		}
	}
	// Process the data
	g_DepthGenerator.GetMetaData(depthMD);
	g_UserGenerator.GetUserPixels(0, sceneMD);
	CalculateJoint();

	head.x=(jpoint[0][0].X);
	head.y=(jpoint[0][0].Y);
	head.z=(jpoint[0][0].Z);

	neck.x=(jpoint[0][1].X);
	neck.y=(jpoint[0][1].Y);
	neck.z=(jpoint[0][1].Z);
	
	rshoulder.x=(jpoint[0][2].X);
	rshoulder.y=(jpoint[0][2].Y);
	rshoulder.z=(jpoint[0][2].Z);

	relbow.x=(jpoint[0][3].X*2+oldjpoint[0][3].X)/3;
	relbow.y=(jpoint[0][3].Y*2+oldjpoint[0][3].Y)/3;
	relbow.z=(jpoint[0][3].Z*2+oldjpoint[0][3].Z)/3;

	rhand.x=(jpoint[0][4].X*2+oldjpoint[0][4].X)/3;
	rhand.y=(jpoint[0][4].Y*2+oldjpoint[0][4].Y)/3;
	rhand.z=(jpoint[0][4].Z*2+oldjpoint[0][4].Z)/3;

	lshoulder.x=(jpoint[0][5].X*2+oldjpoint[0][5].X)/3;
	lshoulder.y=(jpoint[0][5].Y*2+oldjpoint[0][5].Y)/3;
	lshoulder.z=(jpoint[0][5].Z*2+oldjpoint[0][5].Z)/3;

	lelbow.x=(jpoint[0][6].X*2+oldjpoint[0][6].X)/3;
	lelbow.y=(jpoint[0][6].Y*2+oldjpoint[0][6].Y)/3;
	lelbow.z=(jpoint[0][6].Z*2+oldjpoint[0][6].Z)/3;

	lhand.x=(jpoint[0][7].X*2+oldjpoint[0][7].X)/3;
	lhand.y=(jpoint[0][7].Y*2+oldjpoint[0][7].Y)/3;
	lhand.z=(jpoint[0][7].Z*2+oldjpoint[0][7].Z)/3;

	torso.x=(jpoint[0][8].X*2+oldjpoint[0][8].X)/3;
	torso.y=(jpoint[0][8].Y*2+oldjpoint[0][8].Y)/3;
	torso.z=(jpoint[0][8].Z*2+oldjpoint[0][8].Z)/3;

	rhip.x=(jpoint[0][9].X*2+oldjpoint[0][9].X)/3;
	rhip.y=(jpoint[0][9].Y*2+oldjpoint[0][9].Y)/3;
	rhip.z=(jpoint[0][9].Z*2+oldjpoint[0][9].Z)/3;

	rknee.x=(jpoint[0][10].X*2+oldjpoint[0][10].X)/3;
	rknee.y=(jpoint[0][10].Y*2+oldjpoint[0][10].Y)/3;
	rknee.z=(jpoint[0][10].Z*2+oldjpoint[0][10].Z)/3;

	rfoot.x=(jpoint[0][11].X*2+oldjpoint[0][11].X)/3;
	rfoot.y=(jpoint[0][11].Y*2+oldjpoint[0][11].Y)/3;
	rfoot.z=(jpoint[0][11].Z*2+oldjpoint[0][11].Z)/3;

	lhip.x=(jpoint[0][12].X*2+oldjpoint[0][12].X)/3;
	lhip.y=(jpoint[0][12].Y*2+oldjpoint[0][12].Y)/3;
	lhip.z=(jpoint[0][12].Z*2+oldjpoint[0][12].Z)/3;

	lknee.x=(jpoint[0][13].X*2+oldjpoint[0][13].X)/3;
	lknee.y=(jpoint[0][13].Y*2+oldjpoint[0][13].Y)/3;
	lknee.z=(jpoint[0][13].Z*2+oldjpoint[0][13].Z)/3;

	lfoot.x=(jpoint[0][14].X*2+oldjpoint[0][14].X)/3;
	lfoot.y=(jpoint[0][14].Y*2+oldjpoint[0][14].Y)/3;
	lfoot.z=(jpoint[0][14].Z*2+oldjpoint[0][14].Z)/3;

	printf("%f, %f, %f\n",rightreduction.x, rightreduction.y, rightreduction.z);
	printf("%f, %f, %f\n",leftreduction.x, leftreduction.y, leftreduction.z);

	if(jpoint[0][8].X!=0.0&&jpoint[0][8].Y!=0.0&&jpoint[0][8].Z!=0.0){

		Head->setGlobalPosition(NxVec3(-head.x+positionx, -head.y+positiony, -head.z+positionz));
		Head->setLinearVelocity(NxVec3(0.0f, 0.0f, 0.0f));

		Neck->setGlobalPosition(NxVec3(-neck.x+positionx, -neck.y+positiony, -neck.z+positionz));
		Neck->setLinearVelocity(NxVec3(0.0f, 0.0f, 0.0f));

		rightshoulder->setGlobalPosition(NxVec3(-rshoulder.x+positionx, -rshoulder.y+positiony, -rshoulder.z+positionz));
		rightshoulder->setLinearVelocity(NxVec3(0.0f, 0.0f, 0.0f));
		rightelbow->setGlobalPosition(NxVec3(-relbow.x+positionx, -relbow.y+positiony, -relbow.z+positionz));
		rightelbow->setLinearVelocity(NxVec3(0.0f, 0.0f, 0.0f));
		righthand->setGlobalPosition(NxVec3(-rhand.x+positionx, -rhand.y+positiony, -rhand.z+positionz));
		righthand->setLinearVelocity(NxVec3(0.0f, 0.0f, 0.0f));

		leftshoulder->setGlobalPosition(NxVec3(-lshoulder.x+positionx, -lshoulder.y+positiony, -lshoulder.z+positionz));
		leftshoulder->setLinearVelocity(NxVec3(0.0f, 0.0f, 0.0f));
		leftelbow->setGlobalPosition(NxVec3(-lelbow.x+positionx, -lelbow.y+positiony, -lelbow.z+positionz));
		leftelbow->setLinearVelocity(NxVec3(0.0f, 0.0f, 0.0f));
		lefthand->setGlobalPosition(NxVec3(-lhand.x+positionx, -lhand.y+positiony, -lhand.z+positionz));
		lefthand->setLinearVelocity(NxVec3(0.0f, 0.0f, 0.0f));

		Torso->setGlobalPosition(NxVec3(-torso.x+positionx, -torso.y+positiony, -torso.z+positionz));
		Torso->setLinearVelocity(NxVec3(0.0f, 0.0f, 0.0f));

		righthip->setGlobalPosition(NxVec3(-rhip.x+positionx, -rhip.y+positiony, -rhip.z+positionz));
		righthip->setLinearVelocity(NxVec3(0.0f, 0.0f, 0.0f));
		rightknee->setGlobalPosition(NxVec3(-rknee.x+positionx, -rknee.y+positiony, -rknee.z+positionz));
		rightknee->setLinearVelocity(NxVec3(0.0f, 0.0f, 0.0f));
		rightfoot->setGlobalPosition(NxVec3(-rfoot.x+positionx, -rfoot.y+positiony, -rfoot.z+positionz));
		rightfoot->setLinearVelocity(NxVec3(0.0f, 0.0f, 0.0f));

		lefthip->setGlobalPosition(NxVec3(-lhip.x+positionx, -lhip.y+positiony, -lhip.z+positionz));
		lefthip->setLinearVelocity(NxVec3(0.0f, 0.0f, 0.0f));
		leftknee->setGlobalPosition(NxVec3(-lknee.x+positionx, -lknee.y+positiony, -lknee.z+positionz));
		leftknee->setLinearVelocity(NxVec3(0.0f, 0.0f, 0.0f));
		leftfoot->setGlobalPosition(NxVec3(-lfoot.x+positionx, -lfoot.y+positiony, -lfoot.z+positionz));
		leftfoot->setLinearVelocity(NxVec3(0.0f, 0.0f, 0.0f));

	}

	glPushMatrix();
	glBegin(GL_POLYGON);
	glColor4f(0.2f,1.2f,0.1f,1.0f); // ground color
	glVertex3f( 10000.0f,-1.0f, 10000.0f);
	glVertex3f( 10000.0f,-1.0f,-10000.0f);
	glVertex3f(-10000.0f,-1.0f,-10000.0f);
	glVertex3f(-10000.0f,-1.0f, 10000.0f);
	glEnd();
	glPopMatrix();

	// draw the horizon grid
	glPushMatrix();
	glColor4f(1.0f, 1.0f, 1.0f,1.0f);
	glLineWidth(1);
	glBegin(GL_LINES);
	for(int i=-10000; i< 10000; i+=500) {
		glVertex3f( i    , -0.2f,-10000 );
		glVertex3f( i    , -0.2f, 10000 );
		glVertex3f(-10000, -0.2f, i     ); 
		glVertex3f( 10000, -0.2f, i     ); 
	}
	glEnd();
	glPopMatrix();
	
	switch(gameflag){
		case 0: // start screen
			glViewport( 0, 0, (float)glutGet(GLUT_WINDOW_WIDTH),(float)glutGet(GLUT_WINDOW_HEIGHT) );
			glMatrixMode( GL_PROJECTION );
			glLoadIdentity();
			glOrtho( 0.0f, (float)glutGet(GLUT_WINDOW_WIDTH),(float)glutGet(GLUT_WINDOW_HEIGHT), 0.0f, 0.0f, 1.0f );
			glMatrixMode( GL_MODELVIEW );
			glLoadIdentity();

			glPushMatrix();
			glColor3d(1.0, 1.0, 0.0);
			glRasterPos3d(GL_WIN_SIZE_X/2-80, GL_WIN_SIZE_Y/2, 0.0);
			drawBitmapString(GLUT_BITMAP_HELVETICA_18,"Push 's' -> START");
			glPopMatrix();
			printf("Push 's' -> START\n");
			break;

		case 1: // game over screen
			glViewport( 0, 0, (float)glutGet(GLUT_WINDOW_WIDTH),(float)glutGet(GLUT_WINDOW_HEIGHT) );
			glMatrixMode( GL_PROJECTION );
			glLoadIdentity();
			glOrtho( 0.0f, (float)glutGet(GLUT_WINDOW_WIDTH),(float)glutGet(GLUT_WINDOW_HEIGHT), 0.0f, 0.0f, 1.0f );
			glMatrixMode( GL_MODELVIEW );
			glLoadIdentity();

			glPushMatrix();
			glColor3d(0.0, 0.0, 1.0);
			glRasterPos3d(GL_WIN_SIZE_X/2-60, GL_WIN_SIZE_Y/2+20, 0.0);
			drawBitmapString(GLUT_BITMAP_HELVETICA_18,"GAME OVER");
			glPopMatrix();
			glPushMatrix();
			glColor3d(1.0, 1.0, 0.0);
			glRasterPos3d(GL_WIN_SIZE_X/2-65, GL_WIN_SIZE_Y/2, 0.0);
			drawBitmapString(GLUT_BITMAP_HELVETICA_18,gametime);
			glPopMatrix();
			glPushMatrix();
			glColor3d(1.0, 0.0, 0.0);
			glRasterPos3d(GL_WIN_SIZE_X/2-90, GL_WIN_SIZE_Y/2-20, 0.0);
			drawBitmapString(GLUT_BITMAP_HELVETICA_18,"Push 'r' -> RESTART");
			glPopMatrix();
			printf("GAME OVER\ntime -> %f\n",(double)(t2 - t1) / CLOCKS_PER_SEC);
			break;

		case 2: // in-game screen

			glEnable(GL_DEPTH_TEST);
			// draw the actors
			int nbActors = gScene->getNbActors();
			NxActor** actors = gScene->getActors();
			while(nbActors--)
			{
				NxActor* actor = *actors++;
				if(!actor->userData) continue;

				glPushMatrix();
				glEnable(GL_LIGHTING);
				float glMat[16];
				actor->getGlobalPose().getColumnMajor44(glMat);
				glMultMatrixf(glMat);
				myData* mydata = (myData*)actor->userData;
				int shape = mydata->shape;
				int color = mydata->color;

				switch(shape){
				case 1:
						glColor4f(0.0f,0.0f,1.0f,1.0f);
						if(mydata->houkou==0){
						link(pole[0], pole[4]);
						}
						else if(mydata->houkou==1){
						tlink(3000.0,2000.0,6000.0,-3000.0,2000.0,6000.0);
						tlink(-3000.0,2000.0,4000.0,-3000.0,2000.0,6000.0);
						}
						else if(mydata->houkou==2){
						ylink(3000.0,2000.0,4000.0,-3000.0,2000.0,4000.0);
						ylink(3000.0,2000.0,6000.0,-3000.0,2000.0,6000.0);
						}
						else if(mydata->houkou==3){ // panel
						glColor4f(0.0f,1.0f,1.0f,1.0f);
						cuboid(mydata->width*2,mydata->length*2, mydata->height*2);
						}
						else if(mydata->houkou==4){
						}
						break;
					case 2:
						if(mydata->ball == 1){
						glColor4f(1.0f,0.0f,0.0f,1.0f); // ball color
						glutSolidSphere(mydata->size,15,15);
						}
						else if(mydata->ball == 0){
							glColor4f(1.0f,1.0f,0.0f,1.0f);
							glutSolidSphere(mydata->size,15,15);
						}
						break;
					case 4:
						glColor4f(1.0f,0.0f,0.0f,1.0f);
						cylinder(50, 100, 50);
						break;
				}
				glDisable(GL_LIGHTING);
				glPopMatrix();


			}

			glDisable(GL_DEPTH_TEST);
			t2 = clock();
			balltimer2 = clock();
			btime=(double)(balltimer2 - balltimer1) / CLOCKS_PER_SEC;
			gtime=(double)(t2 - t1) / CLOCKS_PER_SEC;
			printf("%f\n",gtime);

			if(gtime<=10.0){
				balldire=2.0;
				ballspeed=250.0;
			}
			else if(gtime<=20.0){
				balldire=1.5;
				ballspeed=300.0;
			}
			else if(gtime<=30.0){
				balldire=1.0;
				ballspeed=350.0;
			}
			else if(gtime<=40.0){
				balldire=0.5;
				ballspeed=400.0;
			}
			break;
	}

	// end of rendering
	gScene->flushStream();
	gScene->fetchResults(NX_RIGID_BODY_FINISHED, true);
	glutSwapBuffers();
}
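The repeated (current*2 + previous)/3 expressions above implement a simple two-to-one blend of the current and previous joint positions; a sketch of that smoothing factored into a helper (same XnPoint3D type as the jpoint arrays in the example):

	// Sketch: blend the current joint position with the previous frame's (2:1 weighting),
	// matching the per-coordinate expressions used in glutDisplay() above.
	static inline XnPoint3D smoothJoint(const XnPoint3D& current, const XnPoint3D& previous)
	{
		XnPoint3D p;
		p.X = (current.X * 2 + previous.X) / 3;
		p.Y = (current.Y * 2 + previous.Y) / 3;
		p.Z = (current.Z * 2 + previous.Z) / 3;
		return p;
	}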
Example #16
int main() {

    const unsigned int nBackgroundTrain = 30;
    const unsigned short touchDepthMin = 10;
    const unsigned short touchDepthMax = 20;
    const unsigned int touchMinArea = 50;

    const bool localClientMode = true; // connect to a local client

    const double debugFrameMaxDepth = 4000; // maximal distance (in millimeters) for 8 bit debug depth frame quantization
    const char* windowName = "Debug";
    const char* colorWindowName = "image";
    const Scalar debugColor0(0, 0, 128);
    const Scalar debugColor1(255, 0, 0);
    const Scalar debugColor2(255, 255, 255);
    const Scalar debugColor3(0, 255, 255);
    const Scalar debugColor4(255, 0, 255);

    int xMin = 50;
    int xMax = 550;
    int yMin = 50;
    int yMax = 300;

    Mat1s depth(480, 640); // 16 bit depth (in millimeters)
    Mat1b depth8(480, 640); // 8 bit depth
    Mat3b rgb(480, 640); // 8 bit RGB image

    Mat3b debug(480, 640); // debug visualization

    Mat1s foreground(640, 480);
    Mat1b foreground8(640, 480);

    Mat1b touch(640, 480); // touch mask

    Mat1s background(480, 640);
    vector<Mat1s> buffer(nBackgroundTrain);

    IplImage * image = cvCreateImage(cvSize(640, 480), 8, 3);
    IplImage * convertedImage = cvCreateImage(cvSize(640, 480), 8, 3);

    initOpenNI("niConfig.xml");

    // TUIO server object
    TuioServer* tuio;
    if (localClientMode) {
        tuio = new TuioServer();
    } else {
        tuio = new TuioServer("192.168.0.2", 3333, false);
    }
    TuioTime time;

    namedWindow(colorWindowName);
    createTrackbar("xMin", colorWindowName, &xMin, 640);
    createTrackbar("xMax", colorWindowName, &xMax, 640);
    createTrackbar("yMin", colorWindowName, &yMin, 480);
    createTrackbar("yMax", colorWindowName, &yMax, 480);
    // create some sliders
    namedWindow(windowName);
    createTrackbar("xMin", windowName, &xMin, 640);
    createTrackbar("xMax", windowName, &xMax, 640);
    createTrackbar("yMin", windowName, &yMin, 480);
    createTrackbar("yMax", windowName, &yMax, 480);

    Keyboard * piano = new Keyboard();
    (*piano).initKeyMap();

    system("qjackctl &");
    sleep(4);
    JackByTheNotes * notesJack = new JackByTheNotes();
    notesJack->connect();
    sleep(2);
    system("sudo jack_connect Piano:Rubinstein system:playback_1 &");

    map<double, timeval> keys;

    // create background model (average depth)
    for (unsigned int i = 0; i < nBackgroundTrain; i++) {
        xnContext.WaitAndUpdateAll();
        depth.data = (uchar*) xnDepthGenerator.GetDepthMap();
        buffer[i] = depth;
    }
    average(buffer, background);
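    // Note: average() is not shown in this listing; presumably it computes the
    // per-pixel mean of the buffered depth frames. A minimal sketch (hypothetical
    // helper, assuming the OpenCV Mat types used above):
    //   void average(const vector<Mat1s>& frames, Mat1s& mean) {
    //       Mat1d acc(frames[0].rows, frames[0].cols, 0.0);
    //       for (size_t k = 0; k < frames.size(); k++) {
    //           Mat1d tmp;
    //           frames[k].convertTo(tmp, CV_64F);
    //           acc += tmp;                       // accumulate in double precision
    //       }
    //       acc.convertTo(mean, CV_16S, 1.0 / frames.size()); // back to 16 bit
    //   }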

    while (waitKey(1) != 27) {
        // read available data
        xnContext.WaitAndUpdateAll();

        // update 16 bit depth matrix
        depth.data = (uchar*) xnDepthGenerator.GetDepthMap();
        //xnImgeGenertor.GetGrayscale8ImageMap()

        XnRGB24Pixel* xnRgb =
            const_cast<XnRGB24Pixel*>(xnImgeGenertor.GetRGB24ImageMap());
        //		IplImage * image = cvCreateImage(cvSize(640, 480), 8, 3);
        //		IplImage * convertedImage = cvCreateImage(cvSize(640, 480), 8, 3);
        cvSetData(image, xnRgb, 640 * 3);
        cvConvertImage(image, convertedImage, CV_CVTIMG_SWAP_RB);
        bool color = true;
        rgb = convertedImage;
        //		cvtColor(rgb,rgb,CV_RGB2BGR);
        // update rgb image
        //		rgb.data = (uchar*) xnImgeGenertor.GetRGB24ImageMap(); // segmentation fault here
        //		cvCvtColor(rgb, rgb, CV_BGR2RGB);

        // extract foreground by simple subtraction of very basic background model
        foreground = background - depth;

        // find touch mask by thresholding (points that are close to background = touch points)
        touch = (foreground > touchDepthMin) & (foreground < touchDepthMax);

        // extract ROI
        Rect roi(xMin, yMin, xMax - xMin, yMax - yMin);
        Mat touchRoi = touch(roi);

        // find touch points
        vector<vector<Point2i> > contours;
        vector<Point2f> touchPoints;
        findContours(touchRoi, contours, CV_RETR_LIST, CV_CHAIN_APPROX_SIMPLE,
                     Point2i(xMin, yMin));
        for (unsigned int i = 0; i < contours.size(); i++) {
            Mat contourMat(contours[i]);
            // find touch points by area thresholding
            if (contourArea(contourMat) > touchMinArea) {
                Scalar center = mean(contourMat);
                Point2i touchPoint(center[0], center[1]);
                touchPoints.push_back(touchPoint);
            }
        }

        // send TUIO cursors
        time = TuioTime::getSessionTime();
        tuio->initFrame(time);

        for (unsigned int i = 0; i < touchPoints.size(); i++) { // touch points
            float cursorX = (touchPoints[i].x - xMin) / (xMax - xMin);
            float cursorY = 1 - (touchPoints[i].y - yMin) / (yMax - yMin);
            TuioCursor* cursor = tuio->getClosestTuioCursor(cursorX, cursorY);
            // TODO improve tracking (don't move cursors away, that might be closer to another touch point)
            if (cursor == NULL || cursor->getTuioTime() == time) {
                tuio->addTuioCursor(cursorX, cursorY);
            } else {
                tuio->updateTuioCursor(cursor, cursorX, cursorY);
            }
        }

        tuio->stopUntouchedMovingCursors();
        tuio->removeUntouchedStoppedCursors();
        tuio->commitFrame();

        // draw debug frame
        depth.convertTo(depth8, CV_8U, 255 / debugFrameMaxDepth); // render depth to debug frame
        cvtColor(depth8, debug, CV_GRAY2BGR);
        debug.setTo(debugColor0, touch); // touch mask
        rectangle(debug, roi, debugColor1, 2); // surface boundaries
        if (color)
            rectangle(rgb, roi, debugColor1, 2); // surface boundaries

        // draw the 10 white keys (9 divider lines) within the roi
        int stride = (xMax - xMin) / 10;
        for (int keyIdx = 1; keyIdx < 10; keyIdx++) { // renamed from "keys" to avoid shadowing the keys map above
            Point lower(xMin + keyIdx * stride, yMax);
            if (keyIdx == 3 || keyIdx == 7) {
                Point upper(xMin + keyIdx * stride, yMin);
                line(debug, upper, lower, debugColor3, 2, 0);
                if (color)
                    line(rgb, upper, lower, debugColor3, 2, 0);
                continue;
            } else {
                Point upper(xMin + keyIdx * stride, (yMin + yMax) / 2);
                line(debug, upper, lower, debugColor3, 2, 0);
                if (color)
                    line(rgb, upper, lower, debugColor3, 2, 0);
            }
            Point blkUpper(xMin + keyIdx * stride - stride / 3, yMin);
            Point blkLower(xMin + keyIdx * stride + stride / 3,
                           (yMin + yMax) / 2);
            rectangle(debug, blkUpper, blkLower, debugColor4, 2);
            if (color)
                rectangle(rgb, blkUpper, blkLower, debugColor4, 2);
        }

        for (unsigned int i = 0; i < touchPoints.size(); i++) { // touch points
            circle(debug, touchPoints[i], 5, debugColor2, CV_FILLED);
            if (color)
                circle(rgb, touchPoints[i], 5, debugColor2, CV_FILLED);
            double frequency = piano->keyFrequency(touchPoints[i].y - 50,
                                                   touchPoints[i].x - 50);

            cout << frequency << " " << touchPoints[i].y - 50 << " "
                 << touchPoints[i].x - 50 << endl;

            if (keys.find(frequency) == keys.end()) {
                Note * note = new Note(frequency, 2, 4000);
                notesJack->playNote(*note);
                timeval now;
                gettimeofday(&now, NULL);
                keys[frequency] = now;
            } else {
                timeval now;
                gettimeofday(&now, NULL);
                if ((now.tv_sec - keys[frequency].tv_sec) * 1000
                        + (now.tv_usec - keys[frequency].tv_usec) / 1000
                        > 1000) {
                    Note * note = new Note(frequency, 2, 4000);
                    notesJack->playNote(*note);
                    keys[frequency] = now;
                }
            }
        }
        // render debug frame (with sliders)
//		IplImage grayScale = debug;
//		cvFlip(&grayScale, NULL, 1);
//		Mat gray(&grayScale);
//		imshow(windowName, gray);
//
//		IplImage colorful = rgb;
//		cvFlip(&colorful, NULL, 1);
//		Mat real(&colorful);
//		imshow("image", real);
        imshow(windowName, debug);
        imshow("image", rgb);
        //		cvShowImage("image", image);
    }

    return 0;
}
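Note: initOpenNI("niConfig.xml") is called above but its body is not part of this listing. A minimal sketch of what it presumably does — initialise the global xnContext from the XML file and look up the depth/image nodes (xnDepthGenerator, xnImgeGenertor) used later — could look like this (hypothetical helper; the StartGeneratingAll call is an assumption):

void initOpenNI(const char* configPath)
{
    // initialise the OpenNI context from the XML configuration
    XnStatus rc = xnContext.InitFromXmlFile(configPath);
    if (rc != XN_STATUS_OK) {
        printf("InitFromXmlFile failed: %s\n", xnGetStatusString(rc));
        exit(1);
    }
    // fetch the generators declared in the XML
    rc = xnContext.FindExistingNode(XN_NODE_TYPE_DEPTH, xnDepthGenerator);
    if (rc != XN_STATUS_OK) {
        printf("No depth node: %s\n", xnGetStatusString(rc));
        exit(1);
    }
    rc = xnContext.FindExistingNode(XN_NODE_TYPE_IMAGE, xnImgeGenertor);
    if (rc != XN_STATUS_OK) {
        printf("No image node: %s\n", xnGetStatusString(rc));
        exit(1);
    }
    xnContext.StartGeneratingAll();
}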
Пример #17
0
void DrawDepthMap(const xn::DepthMetaData& dmd, const xn::SceneMetaData& smd, XnUserID player)
{
	static bool bInitialized = false;	
	static GLuint depthTexID;
	static unsigned char* pDepthTexBuf;
	static int texWidth, texHeight;

	float topLeftX;
	float topLeftY;
	float bottomRightY;
	float bottomRightX;
	float texXpos;
	float texYpos;

	if(!bInitialized)
	{

		texWidth =  getClosestPowerOfTwo(dmd.XRes());
		texHeight = getClosestPowerOfTwo(dmd.YRes());

//		printf("Initializing depth texture: width = %d, height = %d\n", texWidth, texHeight);
		depthTexID = initTexture((void**)&pDepthTexBuf,texWidth, texHeight) ;

//		printf("Initialized depth texture: width = %d, height = %d\n", texWidth, texHeight);
		bInitialized = true;

		topLeftX = dmd.XRes();
		topLeftY = 0;
		bottomRightY = dmd.YRes();
		bottomRightX = 0;
		texXpos =(float)dmd.XRes()/texWidth;
		texYpos  =(float)dmd.YRes()/texHeight;

		memset(texcoords, 0, 8*sizeof(float));
		texcoords[0] = texXpos, texcoords[1] = texYpos, texcoords[2] = texXpos, texcoords[7] = texYpos;

	}
	unsigned int nValue = 0;
	unsigned int nHistValue = 0;
	unsigned int nIndex = 0;
	unsigned int nX = 0;
	unsigned int nY = 0;
	unsigned int nNumberOfPoints = 0;
	XnUInt16 g_nXRes = dmd.XRes();
	XnUInt16 g_nYRes = dmd.YRes();

	unsigned char* pDestImage = pDepthTexBuf;

	const XnDepthPixel* pDepth = dmd.Data();
	const XnLabel* pLabels = smd.Data();

	// Calculate the accumulative histogram
	memset(g_pDepthHist, 0, MAX_DEPTH*sizeof(float));
	for (nY=0; nY<g_nYRes; nY++)
	{
		for (nX=0; nX<g_nXRes; nX++)
		{
			nValue = *pDepth;

			if (nValue != 0)
			{
				g_pDepthHist[nValue]++;
				nNumberOfPoints++;
			}

			pDepth++;
		}
	}

	for (nIndex=1; nIndex<MAX_DEPTH; nIndex++)
	{
		g_pDepthHist[nIndex] += g_pDepthHist[nIndex-1];
	}
	if (nNumberOfPoints)
	{
		for (nIndex=1; nIndex<MAX_DEPTH; nIndex++)
		{
			g_pDepthHist[nIndex] = (unsigned int)(256 * (1.0f - (g_pDepthHist[nIndex] / nNumberOfPoints)));
		}
	}
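	// After this pass g_pDepthHist[d] holds 256 * (1 - cumulativeFraction(d)):
	// nearer depths (small cumulative fraction) map to brighter values and the
	// farthest measured depths fall towards 0, which equalizes the contrast of
	// the rendered depth map.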

	pDepth = dmd.Data();
	{
		XnUInt32 nIndex = 0;
		// Prepare the texture map
		for (nY=0; nY<g_nYRes; nY++)
		{
			for (nX=0; nX < g_nXRes; nX++, nIndex++)
			{
				nValue = *pDepth;
				XnLabel label = *pLabels;
				XnUInt32 nColorID = label % nColors;
				if (label == 0)
				{
					nColorID = nColors;
				}

				if (nValue != 0)
				{
					nHistValue = g_pDepthHist[nValue];

					pDestImage[0] = nHistValue * Colors[nColorID][0]; 
					pDestImage[1] = nHistValue * Colors[nColorID][1];
					pDestImage[2] = nHistValue * Colors[nColorID][2];
				}
				else
				{
					pDestImage[0] = 0;
					pDestImage[1] = 0;
					pDestImage[2] = 0;
				}

				pDepth++;
				pLabels++;
				pDestImage+=3;
			}

			pDestImage += (texWidth - g_nXRes) *3;
		}
	}

	glBindTexture(GL_TEXTURE_2D, depthTexID);
	glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, texWidth, texHeight, 0, GL_RGB, GL_UNSIGNED_BYTE, pDepthTexBuf);

	// Display the OpenGL texture map
	glColor4f(0.75,0.75,0.75,1);

	glEnable(GL_TEXTURE_2D);
	DrawTexture(dmd.XRes(),dmd.YRes(),0,0);	
	glDisable(GL_TEXTURE_2D);

	char strLabel[20] = "";
	XnUserID aUsers[15];
	XnUInt16 nUsers = 15;
	g_UserGenerator.GetUsers(aUsers, nUsers);
	for (int i = 0; i < nUsers; ++i)
	{
		XnPoint3D com;
		g_UserGenerator.GetCoM(aUsers[i], com);
		g_DepthGenerator.ConvertRealWorldToProjective(1, &com, &com);

		if (aUsers[i] == player)
			sprintf(strLabel, "%d (Player)", aUsers[i]);
		else
			sprintf(strLabel, "%d", aUsers[i]);

		glColor4f(1-Colors[i%nColors][0], 1-Colors[i%nColors][1], 1-Colors[i%nColors][2], 1);

		glRasterPos2i(com.X, com.Y);
		glPrintString(GLUT_BITMAP_HELVETICA_18, strLabel);
	}

	// Draw skeleton of user
	if (player != 0)
	{
		glBegin(GL_LINES);
		glColor4f(1-Colors[player%nColors][0], 1-Colors[player%nColors][1], 1-Colors[player%nColors][2], 1);
    
    // gesture
    static int gesture = 0;
    static XnPoint3D previousLeftHandPt;
    XnPoint3D newLeftHandPt;
    newLeftHandPt = getJointPoint(player, XN_SKEL_LEFT_HAND);
    
    if(previousLeftHandPt.X > 0 && previousLeftHandPt.X < 640) {
      if(previousLeftHandPt.X - newLeftHandPt.X > 60)
          gesture = 1;
      else if(previousLeftHandPt.X - newLeftHandPt.X < -60)
          gesture = 2;
      else if(previousLeftHandPt.Y - newLeftHandPt.Y > 60)
          gesture = 3;
      else if(previousLeftHandPt.Y - newLeftHandPt.Y < -60)
          gesture = 4;
      else
          gesture = 0;
    }
    if(gesture != 0)
      printf("gesture: %d\n", gesture);

    previousLeftHandPt = newLeftHandPt;
		
    // head
    XnPoint3D pt = getJointPoint(player, XN_SKEL_HEAD);
    
    // save
    saveLocation(pt, newLeftHandPt, gesture);
    

    DrawLimb(player, XN_SKEL_HEAD, XN_SKEL_NECK);

		DrawLimb(player, XN_SKEL_NECK, XN_SKEL_LEFT_SHOULDER);
		DrawLimb(player, XN_SKEL_LEFT_SHOULDER, XN_SKEL_LEFT_ELBOW);
		DrawLimb(player, XN_SKEL_LEFT_ELBOW, XN_SKEL_LEFT_HAND);

		DrawLimb(player, XN_SKEL_NECK, XN_SKEL_RIGHT_SHOULDER);
		DrawLimb(player, XN_SKEL_RIGHT_SHOULDER, XN_SKEL_RIGHT_ELBOW);
		DrawLimb(player, XN_SKEL_RIGHT_ELBOW, XN_SKEL_RIGHT_HAND);

		DrawLimb(player, XN_SKEL_LEFT_SHOULDER, XN_SKEL_TORSO);
		DrawLimb(player, XN_SKEL_RIGHT_SHOULDER, XN_SKEL_TORSO);

		DrawLimb(player, XN_SKEL_TORSO, XN_SKEL_LEFT_HIP);
		DrawLimb(player, XN_SKEL_LEFT_HIP, XN_SKEL_LEFT_KNEE);
		DrawLimb(player, XN_SKEL_LEFT_KNEE, XN_SKEL_LEFT_FOOT);

		DrawLimb(player, XN_SKEL_TORSO, XN_SKEL_RIGHT_HIP);
		DrawLimb(player, XN_SKEL_RIGHT_HIP, XN_SKEL_RIGHT_KNEE);
		DrawLimb(player, XN_SKEL_RIGHT_KNEE, XN_SKEL_RIGHT_FOOT);
		glEnd();
	}
}
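Note: getClosestPowerOfTwo() used for the texture dimensions above is not part of this listing; a minimal sketch of the usual implementation (round a resolution up to the next power of two so it can serve as a texture size):

unsigned int getClosestPowerOfTwo(unsigned int n)
{
	unsigned int m = 2;
	while (m < n)
		m <<= 1; // keep doubling until we reach or exceed n
	return m;
}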
Пример #18
0
int main(int argc, char* argv[]) {
  glue.Init(argc, argv, 640, 240, "TrackHand");

  xn::Context context;
  XnStatus status = context.Init();
  bmg::OnError(status, []{
    std::cout << "Couldn't init OpenNi!" << std::endl;
    exit(1);
  });

  xn::ImageGenerator image_generator;
  status = image_generator.Create(context);
  bmg::OnError(status, []{
    std::cout << "Couldn't create image generator!" << std::endl;
  });

  status = depth_generator.Create(context);
  bmg::OnError(status, []{
    std::cout << "Couldn't create depth generator!" << std::endl;
  });

  xn::ImageMetaData image_metadata;
  xn::DepthMetaData depth_metadata;

  // Create gesture & hands generators
  status = gesture_generator.Create(context);
  bmg::OnError(status, []{
    std::cout << "Couldn't create gesture generator!" << std::endl;
  });
  status = hands_generator.Create(context);
  bmg::OnError(status, []{
    std::cout << "Couldn't create hands generator!" << std::endl;
  });

  // Register to callbacks
  XnCallbackHandle h1, h2;
  gesture_generator
    .RegisterGestureCallbacks(Gesture_Recognized, Gesture_Process, NULL, h1);
  hands_generator
    .RegisterHandCallbacks(Hand_Create, Hand_Update, Hand_Destroy, NULL, h2);

  status = context.StartGeneratingAll();
  bmg::OnError(status, []{
    std::cout << "Couldn't generate all data!" << std::endl;
  });
  status = gesture_generator.AddGesture(GESTURE, NULL);
  bmg::OnError(status, []{
    std::cout << "Couldn't add gesture!" << std::endl;
  });

  glue.BindDisplayFunc([&]{
    glue.BeginDraw();

    // here goes code for app main loop
    XnStatus status = context.WaitAndUpdateAll();
    bmg::OnError(status, []{
      std::cout << "Couldn't update and wait for new data!" << std::endl;
    });

    image_generator.GetMetaData(image_metadata);
    unsigned imageX = image_metadata.XRes();
    unsigned imageY = image_metadata.YRes();

    glue.DrawOnTexture(
      (void*)image_metadata.RGB24Data(),
      imageX, imageY,
      imageX, imageY,
      320, 0, 640, 240);

    depth_generator.GetMetaData(depth_metadata);
    unsigned depthX = depth_metadata.XRes();
    unsigned depthY = depth_metadata.YRes();

    XnRGB24Pixel* transformed_depth_map = new XnRGB24Pixel[depthX * depthY];
    bmg::CalculateDepth(
      depth_generator.GetDepthMap(), depthX, depthY, MAX_DEPTH, transformed_depth_map);

    glue.DrawOnTexture(
      (void*)transformed_depth_map,
      depthX, depthY,
      depthX, depthY,
      0, 0,
      320, 240);
    delete [] transformed_depth_map;

    if (hand_recognized) {
      // Draw point over tracked hand
      glue.DrawPointOverRegion(static_cast<unsigned>(projective_point.X), static_cast<unsigned>(projective_point.Y), 0, 0);
      glue.DrawPointOverRegion(static_cast<unsigned>(projective_point.X), static_cast<unsigned>(projective_point.Y), 320, 0);
    }

    glue.EndDraw();
  });

  glue.BindKeyboardFunc([](unsigned char key, int x, int y){
    switch(key) {
    case 27:
      exit(1);
    }
  });

  glue.Run();
  context.Release();
}
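Note: the hand callbacks registered above (Hand_Create, Hand_Update, Hand_Destroy) are not included in this listing. A plausible sketch, assuming the globals hand_recognized, projective_point and depth_generator used in the display loop, and the standard OpenNI hand-callback signatures:

void XN_CALLBACK_TYPE Hand_Create(xn::HandsGenerator& generator,
  XnUserID nId, const XnPoint3D* pPosition, XnFloat fTime, void* pCookie)
{
  // a hand started being tracked: remember it and project its position to pixels
  hand_recognized = true;
  depth_generator.ConvertRealWorldToProjective(1, pPosition, &projective_point);
}

void XN_CALLBACK_TYPE Hand_Update(xn::HandsGenerator& generator,
  XnUserID nId, const XnPoint3D* pPosition, XnFloat fTime, void* pCookie)
{
  // refresh the projected hand position every frame
  depth_generator.ConvertRealWorldToProjective(1, pPosition, &projective_point);
}

void XN_CALLBACK_TYPE Hand_Destroy(xn::HandsGenerator& generator,
  XnUserID nId, XnFloat fTime, void* pCookie)
{
  // tracking lost: stop drawing the marker
  hand_recognized = false;
}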
Пример #19
0
XnStatus XnFileDevice::UpdateS2DTables(const xn::DepthGenerator& depth)
{
    XnStatus nRetVal = XN_STATUS_OK;

    XnUInt64 nTemp;
    XnDouble dTemp;

    // get config
    XnShiftToDepthConfig config;

    nRetVal = depth.GetIntProperty(XN_STREAM_PROPERTY_ZERO_PLANE_DISTANCE, nTemp);
    XN_IS_STATUS_OK(nRetVal);
    config.nZeroPlaneDistance = (XnDepthPixel)nTemp;

    nRetVal = depth.GetRealProperty(XN_STREAM_PROPERTY_ZERO_PLANE_PIXEL_SIZE, dTemp);
    XN_IS_STATUS_OK(nRetVal);
    config.fZeroPlanePixelSize = (XnFloat)dTemp;

    nRetVal = depth.GetRealProperty(XN_STREAM_PROPERTY_EMITTER_DCMOS_DISTANCE, dTemp);
    XN_IS_STATUS_OK(nRetVal);
    config.fEmitterDCmosDistance = (XnFloat)dTemp;

    nRetVal = depth.GetIntProperty(XN_STREAM_PROPERTY_MAX_SHIFT, nTemp);
    XN_IS_STATUS_OK(nRetVal);
    config.nDeviceMaxShiftValue = (XnUInt32)nTemp;

    config.nDeviceMaxDepthValue = depth.GetDeviceMaxDepth();

    nRetVal = depth.GetIntProperty(XN_STREAM_PROPERTY_CONST_SHIFT, nTemp);
    XN_IS_STATUS_OK(nRetVal);
    config.nConstShift = (XnUInt32)nTemp;

    nRetVal = depth.GetIntProperty(XN_STREAM_PROPERTY_PIXEL_SIZE_FACTOR, nTemp);
    XN_IS_STATUS_OK(nRetVal);
    config.nPixelSizeFactor = (XnUInt32)nTemp;

    nRetVal = depth.GetIntProperty(XN_STREAM_PROPERTY_PARAM_COEFF, nTemp);
    XN_IS_STATUS_OK(nRetVal);
    config.nParamCoeff = (XnUInt32)nTemp;

    nRetVal = depth.GetIntProperty(XN_STREAM_PROPERTY_SHIFT_SCALE, nTemp);
    XN_IS_STATUS_OK(nRetVal);
    config.nShiftScale = (XnUInt32)nTemp;

    config.nDepthMinCutOff = 0;
    config.nDepthMaxCutOff = (XnDepthPixel)config.nDeviceMaxDepthValue;

    if (!m_ShiftToDepth.bIsInitialized)
    {
        nRetVal = XnShiftToDepthInit(&m_ShiftToDepth, &config);
        XN_IS_STATUS_OK(nRetVal);
    }
    else
    {
        nRetVal = XnShiftToDepthUpdate(&m_ShiftToDepth, &config);
        XN_IS_STATUS_OK(nRetVal);
    }

    // notify
    nRetVal = m_pNotifications->OnNodeGeneralPropChanged(m_pNotificationsCookie, depth.GetName(), XN_STREAM_PROPERTY_S2D_TABLE, m_ShiftToDepth.nShiftsCount * sizeof(XnDepthPixel), m_ShiftToDepth.pShiftToDepthTable);
    XN_IS_STATUS_OK(nRetVal);

    nRetVal = m_pNotifications->OnNodeGeneralPropChanged(m_pNotificationsCookie, depth.GetName(), XN_STREAM_PROPERTY_D2S_TABLE, m_ShiftToDepth.nDepthsCount * sizeof(XnUInt16), m_ShiftToDepth.pDepthToShiftTable);
    XN_IS_STATUS_OK(nRetVal);

    return (XN_STATUS_OK);
}
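Note: once the tables are rebuilt, converting a raw shift value into a depth in millimeters is a plain lookup into the table that was just sent in the notification; roughly (sketch, shiftValue is illustrative):

// the shift-to-depth table maps a raw sensor shift straight to a depth in mm
XnDepthPixel depthMm = m_ShiftToDepth.pShiftToDepthTable[shiftValue];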
Пример #20
0
int main(int argc, char **argv)
{
    commInit("192.168.1.255", 54321);
    XnStatus nRetVal = XN_STATUS_OK;
    xn::EnumerationErrors errors;

    const char *fn = NULL;
    if    (fileExists(SAMPLE_XML_PATH)) fn = SAMPLE_XML_PATH;
    else if (fileExists(SAMPLE_XML_PATH_LOCAL)) fn = SAMPLE_XML_PATH_LOCAL;
    else {
        printf("Could not find '%s' nor '%s'. Aborting.\n" , SAMPLE_XML_PATH, SAMPLE_XML_PATH_LOCAL);
        return XN_STATUS_ERROR;
    }
    printf("Reading config from: '%s'\n", fn);

    nRetVal = g_Context.InitFromXmlFile(fn, g_scriptNode, &errors);
    if (nRetVal == XN_STATUS_NO_NODE_PRESENT)
    {
        XnChar strError[1024];
        errors.ToString(strError, 1024);
        printf("%s\n", strError);
        return (nRetVal);
    }
    else if (nRetVal != XN_STATUS_OK)
    {
        printf("Open failed: %s\n", xnGetStatusString(nRetVal));
        return (nRetVal);
    }

    nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_DepthGenerator);
    CHECK_RC(nRetVal,"No depth");

    nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_USER, g_UserGenerator);
    if (nRetVal != XN_STATUS_OK)
    {
        nRetVal = g_UserGenerator.Create(g_Context);
        CHECK_RC(nRetVal, "Find user generator");
    }

    XnCallbackHandle hUserCallbacks, hCalibrationStart, hCalibrationComplete, hPoseDetected;
    if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_SKELETON))
    {
        printf("Supplied user generator doesn't support skeleton\n");
        return 1;
    }
    nRetVal = g_UserGenerator.RegisterUserCallbacks(User_NewUser, User_LostUser, NULL, hUserCallbacks);
    CHECK_RC(nRetVal, "Register to user callbacks");
    nRetVal = g_UserGenerator.GetSkeletonCap().RegisterToCalibrationStart(UserCalibration_CalibrationStart, NULL, hCalibrationStart);
    CHECK_RC(nRetVal, "Register to calibration start");
    nRetVal = g_UserGenerator.GetSkeletonCap().RegisterToCalibrationComplete(UserCalibration_CalibrationComplete, NULL, hCalibrationComplete);
    CHECK_RC(nRetVal, "Register to calibration complete");

    if (g_UserGenerator.GetSkeletonCap().NeedPoseForCalibration())
    {
        g_bNeedPose = TRUE;
        if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION))
        {
            printf("Pose required, but not supported\n");
            return 1;
        }
        nRetVal = g_UserGenerator.GetPoseDetectionCap().RegisterToPoseDetected(UserPose_PoseDetected, NULL, hPoseDetected);
        CHECK_RC(nRetVal, "Register to Pose Detected");
        g_UserGenerator.GetSkeletonCap().GetCalibrationPose(g_strPose);
    }

    g_UserGenerator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);

    nRetVal = g_Context.StartGeneratingAll();
    CHECK_RC(nRetVal, "StartGenerating");

    XnUserID aUsers[MAX_NUM_USERS];
    XnUInt16 nUsers;
    int j;

    printf("Starting to run\n");
    if(g_bNeedPose)
    {
        printf("Assume calibration pose\n");
    }
    XnUInt32 epochTime = 0;
    while (!xnOSWasKeyboardHit())
    {
        g_Context.WaitOneUpdateAll(g_UserGenerator);
        // print the torso information for the first user already tracking
        nUsers=MAX_NUM_USERS;
        g_UserGenerator.GetUsers(aUsers, nUsers);
        int numTracked=0;
        int userToPrint=-1;
        for(XnUInt16 i=0; i<nUsers; i++)
        {
            if(g_UserGenerator.GetSkeletonCap().IsTracking(aUsers[i])==FALSE)
                continue;

            if(getJoints(aUsers[i])){
              /**
              printf("Left Elbow: %.2f\nLeft Shoulder Roll: %.2f\nLeft Shoulder Pitch: %.2f\nHead Pitch: %.2f\n", 
                  findAngle(jointArr[6], jointArr[4], jointArr[8], 0),
                  findAngle(jointArr[4], jointArr[10], jointArr[6], 0),
                  findAngle(jointArr[4], jointArr[10], jointArr[6], 1),
                  findAngle(jointArr[2], jointArr[1], jointArr[0], 1)
                  );
              **/
              float headAngle = findAngle(jointArr[2], jointArr[1], jointArr[0], 1);
              float leftElbowAngle = findAngle(jointArr[6], jointArr[4], jointArr[8], 0);
              float leftShoulderRoll = findAngle(jointArr[4], jointArr[10], jointArr[6], 0);
              float leftShoulderPitch = findAngle(jointArr[4], jointArr[10], jointArr[6], 1);
              float rightElbowAngle = findAngle(jointArr[5], jointArr[3], jointArr[7], 0);
              float rightShoulderRoll = findAngle(jointArr[3], jointArr[9], jointArr[5], 0);
              float rightShoulderPitch = findAngle(jointArr[3], jointArr[9], jointArr[5], 1);
              ostringstream ostr;
              ostr << "{"<<headAngle<<",{"<<leftShoulderPitch<<","<<leftShoulderRoll<<","<<
                leftElbowAngle<<"},{"<<rightShoulderPitch<<","<<rightShoulderRoll<<","<<rightElbowAngle<<"},}";
              string send = ostr.str();
              //cout<<send;
              //printRotation(aUsers[i]);
              commSend(send);
              usleep(200000);             
            }
        }
        
    }
    g_scriptNode.Release();
    g_DepthGenerator.Release();
    g_UserGenerator.Release();
    g_Context.Release();

    return 0;
}
Пример #21
0
int main(int argc, char **argv)
{
	sender.setup(HOST, PORT);
	
	XnStatus rc = XN_STATUS_OK;

	rc = g_Context.InitFromXmlFile(SAMPLE_XML_PATH);
	CHECK_RC(rc, "InitFromXml");

	rc = g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_DepthGenerator);
	CHECK_RC(rc, "Find depth generator");
	rc = g_Context.FindExistingNode(XN_NODE_TYPE_USER, g_UserGenerator);
	CHECK_RC(rc, "Find user generator");

	XnCallbackHandle h;

	g_UserGenerator.RegisterUserCallbacks(NewUser, LostUser, NULL, h);
	g_UserGenerator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);

	XnCallbackHandle hCalib;
	XnCallbackHandle hPose;
	g_UserGenerator.GetSkeletonCap().RegisterCalibrationCallbacks(&CalibrationStart, &CalibrationEnd, NULL, hCalib);
	g_UserGenerator.GetPoseDetectionCap().RegisterToPoseCallbacks(&PoseDetected, NULL, NULL, hPose);

	rc = g_Context.StartGeneratingAll();
	CHECK_RC(rc, "StartGenerating");

	xn::DepthMetaData depthMD;
	g_DepthGenerator.GetMetaData(depthMD);

	fXRes = depthMD.XRes();
	fYRes = depthMD.YRes();
	fMaxDepth = depthMD.ZRes();
	
	nCutOffMin = 0;
	nCutOffMax = fMaxDepth;

	nPointerX = fXRes / 2;
	nPointerY = fYRes / 2;
	nPointerDiffX = (WIN_SIZE_X / fXRes / 2) + 1;
	nPointerDiffY = (WIN_SIZE_Y / fYRes / 2) + 1;

	if (argc == 2)
	{
		nCutOffMax = atol(argv[1]);
	}

	srand(XnUInt32(time(NULL)));

	glutInit(&argc, argv);
	glutInitDisplayString("stencil depth>16 double rgb samples=0");
    glutInitWindowSize(WIN_SIZE_X, WIN_SIZE_Y);
	glutCreateWindow("Prime Sense Stick Figure Sample");
	glutSetCursor(GLUT_CURSOR_NONE);

	init_opengl();

	glut_helpers_initialize();

	cb.passive_motion_function = MotionCallback;
	cb.keyboard_function = key;

	camera.configure_buttons(0);
	camera.set_camera_mode(true);
	camera.set_parent_rotation( & camera.trackball.r);
	camera.enable();

	object.configure_buttons(1);
	object.translator.t[2] = -1;
	object.translator.scale *= .1f;
	object.trackball.r = rotationf(vec3f(2.0,0.01,0.01), to_radians(180));
	object.set_parent_rotation( & camera.trackball.r);
	object.disable();

	light.configure_buttons(0);
	light.translator.t = vec3f(.5, .5, -1);
	light.set_parent_rotation( & camera.trackball.r);
	light.disable();

	reshaper.zNear = 1;
	reshaper.zFar = 100;

    // make sure all interactors get glut events
	glut_add_interactor(&cb);
	glut_add_interactor(&camera);
	glut_add_interactor(&reshaper);
	glut_add_interactor(&light);
	glut_add_interactor(&object);

	camera.translator.t = vec3f(0, 0, 0);
	camera.trackball.r = rotationf(vec3f(0, 0, 0), to_radians(0));

	light.translator.t = vec3f (0, 1.13, -2.41);
	light.trackball.r = rotationf(vec3f(0.6038, -0.1955, -0.4391), to_radians(102));

	glutIdleFunc(idle);
	glutDisplayFunc(display);
	// Per frame code is in display
	glutMainLoop();


	return (0);
}
Пример #22
0
int main(int argc, char **argv) {
	ros::init(argc, argv, "PersonTracker");
	ros::NodeHandle nh;
	ros::Rate loop_rate(10);
    ROS_INFO("Initalizing Arm Connection....");
    ros::Publisher leftArmPublisher = nh.advertise<prlite_kinematics::SphereCoordinate>("/armik/n0/position", 1000);
    ros::Publisher rightArmPublisher = nh.advertise<prlite_kinematics::SphereCoordinate>("/armik/n1/position", 1000);
	yourmom = nh.advertise<prlite_kinematics::some_status_thing>("kinect_hack_status", 1000);
    ros::Duration(2.0).sleep();
    prlite_kinematics::SphereCoordinate coord;
	prlite_kinematics::some_status_thing status;
    coord.radius = 35.0;
    coord.phi = 0.0;
    coord.theta = 0.0;
	status.lulz = 0;	//initialized/reset
	yourmom.publish(status);
    leftArmPublisher.publish(coord);
    rightArmPublisher.publish(coord);
	ROS_INFO("Initalizing Kinect + NITE....");
	XnStatus nRetVal = XN_STATUS_OK;
	xn::EnumerationErrors errors;
	nRetVal = g_Context.InitFromXmlFile(SAMPLE_XML_PATH, &errors);
	
	if (nRetVal == XN_STATUS_NO_NODE_PRESENT)
	{
		XnChar strError[1024];
		errors.ToString(strError, 1024);
		printf("%s\n", strError);
		return (nRetVal);
	} else if (nRetVal != XN_STATUS_OK) {
		printf("Open failed: %s\n", xnGetStatusString(nRetVal));
		return (nRetVal);
	}

	nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_DepthGenerator);
	CHECK_RC(nRetVal, "Find depth generator");
	nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_USER, g_UserGenerator);
	if (nRetVal != XN_STATUS_OK)
	{
		nRetVal = g_UserGenerator.Create(g_Context);
		CHECK_RC(nRetVal, "Find user generator");
	}

	XnCallbackHandle hUserCallbacks, hCalibrationCallbacks, hPoseCallbacks;
	if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_SKELETON))
	{
		printf("Supplied user generator doesn't support skeleton\n");
		return 1;
	}
	g_UserGenerator.RegisterUserCallbacks(User_NewUser, User_LostUser, NULL, hUserCallbacks);
	g_UserGenerator.GetSkeletonCap().RegisterCalibrationCallbacks(UserCalibration_CalibrationStart, UserCalibration_CalibrationEnd, NULL, hCalibrationCallbacks);

	if (g_UserGenerator.GetSkeletonCap().NeedPoseForCalibration())
	{
		g_bNeedPose = TRUE;
		if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION))
		{
			printf("Pose required, but not supported\n");
			return 1;
		}
		g_UserGenerator.GetPoseDetectionCap().RegisterToPoseCallbacks(UserPose_PoseDetected, NULL, NULL, hPoseCallbacks);
		g_UserGenerator.GetSkeletonCap().GetCalibrationPose(g_strPose);
	}

	g_UserGenerator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);

	nRetVal = g_Context.StartGeneratingAll();
	CHECK_RC(nRetVal, "StartGenerating");
    ROS_INFO("Ready To Go!\n");

	while (ros::ok()) {
		// Update OpenNI
		g_Context.WaitAndUpdateAll();
		xn::SceneMetaData sceneMD;
		xn::DepthMetaData depthMD;
		g_DepthGenerator.GetMetaData(depthMD);
		g_UserGenerator.GetUserPixels(0, sceneMD);
		// Print positions
		XnUserID aUsers[15];
		XnUInt16 nUsers = 15;
		g_UserGenerator.GetUsers(aUsers, nUsers);
		for (int i = 0; i < nUsers; ++i) {
			if(g_UserGenerator.GetSkeletonCap().IsTracking(aUsers[i])) {
				// Read joint positions for person (No WRISTs)
				XnSkeletonJointPosition torsoPosition, lShoulderPosition, rShoulderPosition, neckPosition, headPosition, lElbowPosition, rElbowPosition;
                XnSkeletonJointPosition rWristPosition, lWristPosition, rHipPosition, lHipPosition, lKneePosition, rKneePosition;
                XnSkeletonJointPosition lFootPosition, rFootPosition;
				g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(aUsers[i], XN_SKEL_TORSO, torsoPosition);
                g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(aUsers[i], XN_SKEL_NECK, neckPosition);
                g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(aUsers[i], XN_SKEL_HEAD, headPosition);
				g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(aUsers[i], XN_SKEL_LEFT_SHOULDER, lShoulderPosition);
				g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(aUsers[i], XN_SKEL_RIGHT_SHOULDER, rShoulderPosition);
				g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(aUsers[i], XN_SKEL_LEFT_ELBOW, lElbowPosition);
				g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(aUsers[i], XN_SKEL_RIGHT_ELBOW, rElbowPosition);
				g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(aUsers[i], XN_SKEL_LEFT_HAND, lWristPosition);
				g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(aUsers[i], XN_SKEL_RIGHT_HAND, rWristPosition);
                g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(aUsers[i], XN_SKEL_LEFT_HIP, lHipPosition);
                g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(aUsers[i], XN_SKEL_RIGHT_HIP, rHipPosition);
                g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(aUsers[i], XN_SKEL_LEFT_KNEE, lKneePosition);
                g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(aUsers[i], XN_SKEL_RIGHT_KNEE, rKneePosition);
                g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(aUsers[i], XN_SKEL_LEFT_FOOT, lFootPosition);
                g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(aUsers[i], XN_SKEL_RIGHT_FOOT, rFootPosition);

                // Read Joint Orientations
                XnSkeletonJointOrientation torsoOrientation, lShoulderOrientation, rShoulderOrientation, lHipOrientation, rHipOrientation;
                XnSkeletonJointOrientation lWristOrientation, rWristOrientation, lElbowOrientation, rElbowOrientation;
                XnSkeletonJointOrientation headOrientation, neckOrientation;
                XnSkeletonJointOrientation lKneeOrientation, rKneeOrientation, lFootOrientation, rFootOrientation;
	            g_UserGenerator.GetSkeletonCap().GetSkeletonJointOrientation(aUsers[i], XN_SKEL_TORSO, torsoOrientation);
	            g_UserGenerator.GetSkeletonCap().GetSkeletonJointOrientation(aUsers[i], XN_SKEL_HEAD, headOrientation);
	            g_UserGenerator.GetSkeletonCap().GetSkeletonJointOrientation(aUsers[i], XN_SKEL_NECK, neckOrientation);
	            g_UserGenerator.GetSkeletonCap().GetSkeletonJointOrientation(aUsers[i], XN_SKEL_LEFT_SHOULDER, lShoulderOrientation);
	            g_UserGenerator.GetSkeletonCap().GetSkeletonJointOrientation(aUsers[i], XN_SKEL_RIGHT_SHOULDER, rShoulderOrientation);
	            g_UserGenerator.GetSkeletonCap().GetSkeletonJointOrientation(aUsers[i], XN_SKEL_LEFT_HIP, lHipOrientation);
	            g_UserGenerator.GetSkeletonCap().GetSkeletonJointOrientation(aUsers[i], XN_SKEL_RIGHT_HIP, rHipOrientation);
	            g_UserGenerator.GetSkeletonCap().GetSkeletonJointOrientation(aUsers[i], XN_SKEL_LEFT_ELBOW, lElbowOrientation);
	            g_UserGenerator.GetSkeletonCap().GetSkeletonJointOrientation(aUsers[i], XN_SKEL_RIGHT_ELBOW, rElbowOrientation);
	            g_UserGenerator.GetSkeletonCap().GetSkeletonJointOrientation(aUsers[i], XN_SKEL_LEFT_HAND, lWristOrientation);
	            g_UserGenerator.GetSkeletonCap().GetSkeletonJointOrientation(aUsers[i], XN_SKEL_RIGHT_HAND, rWristOrientation);
	            g_UserGenerator.GetSkeletonCap().GetSkeletonJointOrientation(aUsers[i], XN_SKEL_LEFT_KNEE, lKneeOrientation);
	            g_UserGenerator.GetSkeletonCap().GetSkeletonJointOrientation(aUsers[i], XN_SKEL_RIGHT_KNEE, rKneeOrientation);
	            g_UserGenerator.GetSkeletonCap().GetSkeletonJointOrientation(aUsers[i], XN_SKEL_LEFT_FOOT, lFootOrientation);
	            g_UserGenerator.GetSkeletonCap().GetSkeletonJointOrientation(aUsers[i], XN_SKEL_RIGHT_FOOT, rFootOrientation);

	            XnFloat* m = torsoOrientation.orientation.elements;
	            KDL::Rotation torsoO(m[0], m[1], m[2], m[3], m[4], m[5], m[6], m[7], m[8]);
                m = lShoulderOrientation.orientation.elements;
                KDL::Rotation lShouldO(m[0], m[1], m[2], m[3], m[4], m[5], m[6], m[7], m[8]);
                m = rShoulderOrientation.orientation.elements;
                KDL::Rotation rShouldO(m[0], m[1], m[2], m[3], m[4], m[5], m[6], m[7], m[8]);
                m = lHipOrientation.orientation.elements;
                KDL::Rotation lHipO(m[0], m[1], m[2], m[3], m[4], m[5], m[6], m[7], m[8]);
                m = rHipOrientation.orientation.elements;
                KDL::Rotation rHipO(m[0], m[1], m[2], m[3], m[4], m[5], m[6], m[7], m[8]);
                m = headOrientation.orientation.elements;
                KDL::Rotation headO(m[0], m[1], m[2], m[3], m[4], m[5], m[6], m[7], m[8]);
                m = neckOrientation.orientation.elements;
                KDL::Rotation neckO(m[0], m[1], m[2], m[3], m[4], m[5], m[6], m[7], m[8]);
                m = lElbowOrientation.orientation.elements;
                KDL::Rotation lElbowO(m[0], m[1], m[2], m[3], m[4], m[5], m[6], m[7], m[8]);
                m = rElbowOrientation.orientation.elements;
                KDL::Rotation rElbowO(m[0], m[1], m[2], m[3], m[4], m[5], m[6], m[7], m[8]);
                m = lWristOrientation.orientation.elements;
                KDL::Rotation lWristO(m[0], m[1], m[2], m[3], m[4], m[5], m[6], m[7], m[8]);
                m = rWristOrientation.orientation.elements;
                KDL::Rotation rWristO(m[0], m[1], m[2], m[3], m[4], m[5], m[6], m[7], m[8]);
                m = lKneeOrientation.orientation.elements;
                KDL::Rotation lKneeO(m[0], m[1], m[2], m[3], m[4], m[5], m[6], m[7], m[8]);
                m = rKneeOrientation.orientation.elements;
                KDL::Rotation rKneeO(m[0], m[1], m[2], m[3], m[4], m[5], m[6], m[7], m[8]);
                m = lFootOrientation.orientation.elements;
                KDL::Rotation lFootO(m[0], m[1], m[2], m[3], m[4], m[5], m[6], m[7], m[8]);
                m = rFootOrientation.orientation.elements;
                KDL::Rotation rFootO(m[0], m[1], m[2], m[3], m[4], m[5], m[6], m[7], m[8]);

	            double qx = 0.0, qy = 0.0, qz = 0.0, qw = 0.0;

				// Get Points in 3D space
				XnPoint3D torso = torsoPosition.position;
				XnPoint3D lShould = lShoulderPosition.position;
				XnPoint3D rShould = rShoulderPosition.position;
                XnPoint3D lElbow = lElbowPosition.position;
                XnPoint3D rElbow = rElbowPosition.position;
                XnPoint3D lWrist = lWristPosition.position;
                XnPoint3D rWrist = rWristPosition.position;
                XnPoint3D neck = neckPosition.position;
                XnPoint3D head = headPosition.position;
                XnPoint3D lHip = lHipPosition.position;
                XnPoint3D rHip = rHipPosition.position;
                XnPoint3D lKnee = lKneePosition.position;
                XnPoint3D rKnee = rKneePosition.position;
                XnPoint3D lFoot = lFootPosition.position;
                XnPoint3D rFoot = rFootPosition.position;

                // ---------- ARM CONTROLLER HACK ------------------

                // Calculate arm length of user
                double lenL = calc3DDist( lShould, lElbow ) + calc3DDist( lElbow, lWrist );
                double lenR = calc3DDist( rShould, rElbow ) + calc3DDist( rElbow, rWrist );
                double distL = calc3DDist(lShould, lWrist);
                double distR = calc3DDist(rShould, rWrist);

                // Calculate positions
                prlite_kinematics::SphereCoordinate lCoord;
                prlite_kinematics::SphereCoordinate rCoord;

                lCoord.radius = 35 * (distL / lenL);
                rCoord.radius = 35 * (distR / lenR);
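                // The shoulder-to-wrist distance divided by total arm length is the fraction
                // of full extension; scaling by 35 maps it to the same units as the initial
                // coord.radius published above (presumably the arm controller's reach).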

                lCoord.theta = -atan2(lShould.X - lWrist.X, lShould.Z - lWrist.Z);
                rCoord.theta = -atan2(rShould.X - rWrist.X, rShould.Z - rWrist.Z);
                if(lCoord.theta > 1.0) lCoord.theta = 1.0;
                if(lCoord.theta < -1.0) lCoord.theta = -1.0;
                if(rCoord.theta > 1.0) rCoord.theta = 1.0;
                if(rCoord.theta < -1.0) rCoord.theta = -1.0;

                lCoord.phi = -atan2(lShould.Y - lWrist.Y, lShould.X - lWrist.X);
                rCoord.phi = -atan2(rShould.Y - rWrist.Y, rShould.X - rWrist.X);
                if(lCoord.phi > 1.25) lCoord.phi = 1.25;
                if(lCoord.phi < -0.33) lCoord.phi = -0.33;
                if(rCoord.phi > 1.25) rCoord.phi = 1.25;
                if(rCoord.phi < -0.33) rCoord.phi = -0.33;

                ROS_INFO("User %d: Left (%lf,%lf,%lf), Right (%lf,%lf,%lf)", i, lCoord.radius, lCoord.theta, lCoord.phi, rCoord.radius, rCoord.theta, rCoord.phi);

                // Publish to arms
                leftArmPublisher.publish(rCoord);
                rightArmPublisher.publish(lCoord);

                // ---------- END HACK -----------------

                // Publish Transform
                static tf::TransformBroadcaster br;
                tf::Transform transform;
                std::stringstream body_name, neck_name, lshould_name, rshould_name, head_name, relbow_name, lelbow_name, lwrist_name, rwrist_name;
                std::stringstream lhip_name, rhip_name, lknee_name, rknee_name, lfoot_name, rfoot_name;
                body_name << "Torso (" << i << ")" << std::ends;
                neck_name << "Neck (" << i << ")" << std::ends;
                head_name << "Head (" << i << ")" << std::ends;
                lshould_name << "Shoulder L (" << i << ")" << std::ends;
                rshould_name << "Shoulder R (" << i << ")" << std::ends;
                lelbow_name << "Elbow L (" << i << ")" << std::ends;
                relbow_name << "Elbow R (" << i << ")" << std::ends;
                lwrist_name << "Wrist L (" << i << ")" << std::ends;
                rwrist_name << "Wrist R (" << i << ")" << std::ends;
                lhip_name << "Hip L (" << i << ")" << std::ends;
                rhip_name << "Hip R (" << i << ")" << std::ends;
                lknee_name << "Knee L (" << i << ")" << std::ends;
                rknee_name << "Knee R (" << i << ")" << std::ends;
                lfoot_name << "Foot L (" << i << ")" << std::ends;
                rfoot_name << "Foot R (" << i << ")" << std::ends;

                // Publish Torso
                torsoO.GetQuaternion(qx, qy, qz, qw);
                transform.setOrigin(tf::Vector3(torso.X / ADJUST_VALUE, torso.Y / ADJUST_VALUE, torso.Z / ADJUST_VALUE));
                transform.setRotation(tf::Quaternion(qx, qy, qz, qw));
                br.sendTransform(tf::StampedTransform(transform, ros::Time::now(), "world", body_name.str().c_str()));  
                // Publish Left Shoulder
                lShouldO.GetQuaternion(qx, qy, qz, qw);
                transform.setOrigin(tf::Vector3(lShould.X / ADJUST_VALUE, lShould.Y / ADJUST_VALUE, lShould.Z / ADJUST_VALUE));
                transform.setRotation(tf::Quaternion(qx, qy, qz, qw));
                br.sendTransform(tf::StampedTransform(transform, ros::Time::now(), "world", lshould_name.str().c_str()));
                // Publish Right Shoulder
                rShouldO.GetQuaternion(qx, qy, qz, qw);
                transform.setOrigin(tf::Vector3(rShould.X / ADJUST_VALUE, rShould.Y / ADJUST_VALUE, rShould.Z / ADJUST_VALUE));
                transform.setRotation(tf::Quaternion(qx, qy, qz, qw));
                br.sendTransform(tf::StampedTransform(transform, ros::Time::now(), "world", rshould_name.str().c_str())); 
                // Publish Left Elbow
                lElbowO.GetQuaternion(qx, qy, qz, qw);
                transform.setOrigin(tf::Vector3(lElbow.X / ADJUST_VALUE, lElbow.Y / ADJUST_VALUE, lElbow.Z / ADJUST_VALUE));
                transform.setRotation(tf::Quaternion(qx, qy, qz, qw));
                br.sendTransform(tf::StampedTransform(transform, ros::Time::now(), "world", lelbow_name.str().c_str()));
                // Publish Right Elbow
                rElbowO.GetQuaternion(qx, qy, qz, qw);
                transform.setOrigin(tf::Vector3(rElbow.X / ADJUST_VALUE, rElbow.Y / ADJUST_VALUE, rElbow.Z / ADJUST_VALUE));
                transform.setRotation(tf::Quaternion(qx, qy, qz, qw));
                br.sendTransform(tf::StampedTransform(transform, ros::Time::now(), "world", relbow_name.str().c_str()));
                // Publish Left Wrist
                lWristO.GetQuaternion(qx, qy, qz, qw);
                transform.setOrigin(tf::Vector3(lWrist.X / ADJUST_VALUE, lWrist.Y / ADJUST_VALUE, lWrist.Z / ADJUST_VALUE));
                transform.setRotation(tf::Quaternion(qx, qy, qz, qw));
                br.sendTransform(tf::StampedTransform(transform, ros::Time::now(), "world", lwrist_name.str().c_str()));
                // Publish Right Wrist
                rWristO.GetQuaternion(qx, qy, qz, qw);
                transform.setOrigin(tf::Vector3(rWrist.X / ADJUST_VALUE, rWrist.Y / ADJUST_VALUE, rWrist.Z / ADJUST_VALUE));
                transform.setRotation(tf::Quaternion(qx, qy, qz, qw));
                br.sendTransform(tf::StampedTransform(transform, ros::Time::now(), "world", rwrist_name.str().c_str()));
                // Publish Neck
                neckO.GetQuaternion(qx, qy, qz, qw);
                transform.setOrigin(tf::Vector3(neck.X / ADJUST_VALUE, neck.Y / ADJUST_VALUE, neck.Z / ADJUST_VALUE));
                transform.setRotation(tf::Quaternion(qx, qy, qz, qw));
                br.sendTransform(tf::StampedTransform(transform, ros::Time::now(), "world", neck_name.str().c_str())); 
                // Publish Head
                headO.GetQuaternion(qx, qy, qz, qw);
                transform.setOrigin(tf::Vector3(head.X / ADJUST_VALUE, head.Y / ADJUST_VALUE, head.Z / ADJUST_VALUE));
                transform.setRotation(tf::Quaternion(qx, qy, qz, qw));
                br.sendTransform(tf::StampedTransform(transform, ros::Time::now(), "world", head_name.str().c_str()));
                // Publish Left Hip
                lHipO.GetQuaternion(qx, qy, qz, qw);
                transform.setOrigin(tf::Vector3(lHip.X / ADJUST_VALUE, lHip.Y / ADJUST_VALUE, lHip.Z / ADJUST_VALUE));
                transform.setRotation(tf::Quaternion(qx, qy, qz, qw));
                br.sendTransform(tf::StampedTransform(transform, ros::Time::now(), "world", lhip_name.str().c_str()));
                // Publish Right Hip
                rHipO.GetQuaternion(qx, qy, qz, qw);
                transform.setOrigin(tf::Vector3(rHip.X / ADJUST_VALUE, rHip.Y / ADJUST_VALUE, rHip.Z / ADJUST_VALUE));
                transform.setRotation(tf::Quaternion(qx, qy, qz, qw));
                br.sendTransform(tf::StampedTransform(transform, ros::Time::now(), "world", rhip_name.str().c_str())); 
                // Publish Left Knee
                lKneeO.GetQuaternion(qx, qy, qz, qw);
                transform.setOrigin(tf::Vector3(lKnee.X / ADJUST_VALUE, lKnee.Y / ADJUST_VALUE, lKnee.Z / ADJUST_VALUE));
                transform.setRotation(tf::Quaternion(qx, qy, qz, qw));
                br.sendTransform(tf::StampedTransform(transform, ros::Time::now(), "world", lknee_name.str().c_str()));
                // Publish Right Knee
                rKneeO.GetQuaternion(qx, qy, qz, qw);
                transform.setOrigin(tf::Vector3(rKnee.X / ADJUST_VALUE, rKnee.Y / ADJUST_VALUE, rKnee.Z / ADJUST_VALUE));
                transform.setRotation(tf::Quaternion(qx, qy, qz, qw));
                br.sendTransform(tf::StampedTransform(transform, ros::Time::now(), "world", rknee_name.str().c_str())); 
                // Publish Left Foot
                lFootO.GetQuaternion(qx, qy, qz, qw);
                transform.setOrigin(tf::Vector3(lFoot.X / ADJUST_VALUE, lFoot.Y / ADJUST_VALUE, lFoot.Z / ADJUST_VALUE));
                transform.setRotation(tf::Quaternion(qx, qy, qz, qw));
                br.sendTransform(tf::StampedTransform(transform, ros::Time::now(), "world", lfoot_name.str().c_str()));
                // Publish Right Foot
                rFootO.GetQuaternion(qx, qy, qz, qw);
                transform.setOrigin(tf::Vector3(rFoot.X / ADJUST_VALUE, rFoot.Y / ADJUST_VALUE, rFoot.Z / ADJUST_VALUE));
                transform.setRotation(tf::Quaternion(qx, qy, qz, qw));
                br.sendTransform(tf::StampedTransform(transform, ros::Time::now(), "world", rfoot_name.str().c_str()));
			}
		}
		ros::spinOnce();
		//loop_rate.sleep();
	}
	g_Context.Shutdown();
	return 0;
}
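Note: calc3DDist() used above to measure arm length and reach is not included in this listing; a minimal sketch, assuming it is the Euclidean distance between two joint positions (millimeters in OpenNI real-world coordinates, needs <cmath>):

double calc3DDist(XnPoint3D a, XnPoint3D b)
{
	double dx = a.X - b.X;
	double dy = a.Y - b.Y;
	double dz = a.Z - b.Z;
	return sqrt(dx * dx + dy * dy + dz * dz); // straight-line distance in mm
}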
Пример #23
0
bool DataCapture::initialise()
{
    context_.Shutdown();

    XnStatus rc = context_.Init(); 
    if( rc != XN_STATUS_OK )
    {
        std::cout << "Init: " << xnGetStatusString(rc) << std::endl;
        return false;
    }

    rc = depthGen_.Create(context_);
    if( rc != XN_STATUS_OK )
    {
        std::cout << "depthGen.Create: " << xnGetStatusString(rc) << std::endl;
        return false;
    }

    rc = imageGen_.Create(context_);
    if( rc != XN_STATUS_OK )
    {
        std::cout << "imageGen.Create: " << xnGetStatusString(rc) << std::endl;
        return false;
    }

    rc = imageGen_.SetPixelFormat(XN_PIXEL_FORMAT_RGB24);
    if( rc != XN_STATUS_OK )
    {
        std::cout << "SetPixelFormat: " << xnGetStatusString(rc) << std::endl;
        return false;
    }

    XnMapOutputMode imgMode;
    imgMode.nXRes = 640; // XN_VGA_X_RES
    imgMode.nYRes = 480; // XN_VGA_Y_RES
    imgMode.nFPS = 30;
    rc = imageGen_.SetMapOutputMode(imgMode);
    if( rc != XN_STATUS_OK )
    {
        std::cout << "image SetMapOutputMode: " << xnGetStatusString(rc) << std::endl;
        return false;
    }

    rc = depthGen_.SetMapOutputMode(imgMode);
    if( rc != XN_STATUS_OK )
    {
        std::cout << "depth SetMapOutputMode: " << xnGetStatusString(rc) << std::endl;
        return false;
    }

    depthGen_.GetMetaData(depthMd_);
    std::cout << "Depth offset " << depthMd_.XOffset() << " " << depthMd_.YOffset() << std::endl;

    // set the depth image viewpoint
    depthGen_.GetAlternativeViewPointCap().SetViewPoint(imageGen_);

    // read off the depth camera field of view.  This is the FOV corresponding to
    // the IR camera viewpoint, regardless of the alternative viewpoint settings.
    XnFieldOfView fov;
    rc = depthGen_.GetFieldOfView(fov);
    std::cout << "Fov: " << fov.fHFOV << " " << fov.fVFOV << std::endl;

    pDepthData_ = new char [640 * 480];
    pRgbData_ = new char [640 * 480 * 3];

    return true;
}
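Note: if the field of view read above is needed to project depth pixels into 3D manually, the focal length in pixels follows from the FOV (reported in radians) and the 640x480 output mode configured above; a small illustrative sketch (not part of the original class):

// per-axis focal lengths in pixels, derived from the reported FOV (tan() from <cmath>)
double focalLengthX = (640.0 / 2.0) / tan(fov.fHFOV / 2.0);
double focalLengthY = (480.0 / 2.0) / tan(fov.fVFOV / 2.0);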
void glut_display() {
	xn::DepthMetaData pDepthMapMD;
	xn::ImageMetaData pImageMapMD;
#ifdef DEBUGOUT
	ofstream datei;
#endif

	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

	glMatrixMode(GL_PROJECTION);
	glPushMatrix();
	glLoadIdentity();
	gluPerspective(45, (GLdouble)WINDOW_SIZE_X / WINDOW_SIZE_Y, 1000, 5000); // cast avoids integer division for the aspect ratio
//	glOrtho(0, WINDOW_SIZE_X, WINDOW_SIZE_Y, 0, -128, 128);

	glMatrixMode(GL_TEXTURE);
	glLoadIdentity();
//	glTranslatef(-12.8/640.0, 9.0/480.0, 0);
//	glTranslatef(-12.8/630.0, 9.0/480.0,0);
	glScalef(scalex, scaley, 1.0);
	glTranslatef(transx/630, transy/480, 0.0);

	glMatrixMode(GL_MODELVIEW);
	glLoadIdentity();
	
	rot_angle+=0.7;

	// Wait for new data from the depth generator
	nRetVal = context.WaitAndUpdateAll();
	checkError("Error while updating the data", nRetVal);

	// Read the current depth metadata
	depth.GetMetaData(pDepthMapMD);
	// Read the current depth map
	const XnDepthPixel* pDepthMap = depth.GetDepthMap();

	if(maxdepth==-1)
		maxdepth = getMaxDepth(pDepthMap);

	// Read the current image metadata
	image.GetMetaData(pImageMapMD);
	// Read the current image
	const XnRGB24Pixel* pImageMap = image.GetRGB24ImageMap();

	glColor3f(1, 1, 1);
//	XnDepthPixel maxdepth = depth.GetDeviceMaxDepth();
	const unsigned int xres = pDepthMapMD.XRes();
	const unsigned int yres = pDepthMapMD.YRes();

#ifdef DEBUGOUT
	datei.open("daniel.txt", ios::out);
#endif

	for(unsigned int y=0; y<yres-1; y++) {
		for(unsigned int x=0; x<xres; x++) {
			aDepthMap[x+y*xres] = static_cast<GLubyte>(static_cast<float>(pDepthMap[x+y*xres])/static_cast<float>(maxdepth)*255);
		}
	}

	/*
	glEnable(GL_TEXTURE_2D);
	glPushMatrix();
	glLoadIdentity();
	glTranslatef(-800, 0, -2000);
	glBindTexture(GL_TEXTURE_2D, texture_rgb);
	glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, 640, 480, 0, GL_RGB, GL_UNSIGNED_BYTE, pImageMap);
	glBegin(GL_QUADS);
		glTexCoord2f(0,1); glVertex3f(0,0,0);
		glTexCoord2f(1,1); glVertex3f(640,0,0);
		glTexCoord2f(1,0); glVertex3f(640,480,0);
		glTexCoord2f(0,0); glVertex3f(0,480,0);
	glEnd();
	glPopMatrix();

	glPushMatrix();
	glLoadIdentity();
	glTranslatef(-800, 600, -2000);
	glBindTexture(GL_TEXTURE_2D, texture_depth);
	glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE8, 640, 480, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, aDepthMap);
	glBegin(GL_QUADS);
		glTexCoord2f(0,1); glVertex3f(0,0,0);
		glTexCoord2f(1,1); glVertex3f(640,0,0);
		glTexCoord2f(1,0); glVertex3f(640,480,0);
		glTexCoord2f(0,0); glVertex3f(0,480,0);
	glEnd();
	glPopMatrix();*/

	glPushMatrix();
	glLoadIdentity();
	glTranslatef(-100, -100, -2000);
	glRotatef(cx,0,1,0);
	glRotatef(cy,1,0,0);
	glTranslatef(-320, -240, 1000);
	glEnable(GL_TEXTURE_2D);
	glBindTexture(GL_TEXTURE_2D, texture_rgb);
	glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, 640, 480, 0, GL_RGB, GL_UNSIGNED_BYTE, pImageMap);
	glBegin(GL_POINTS);
	for(unsigned int y=0; y<yres-1; y++) {
		for(unsigned int x=0; x<630; x++) {
			if(pDepthMap[x+y*xres]!=0) {
				glTexCoord2f(static_cast<float>(x)/static_cast<float>(630), static_cast<float>(y)/static_cast<float>(480)); 
				glVertex3f(x, (yres-y), -pDepthMap[x+y*xres]/2.00);
#ifdef DEBUGOUT
				datei << t_gamma[pDepthMap[x+y*xres]] << endl;
#endif
			}
		}
	}
	glEnd();
	glPopMatrix();
	glDisable(GL_TEXTURE_2D);
	glutSwapBuffers();
#ifdef DEBUGOUT
	datei.close();
	exit(-1);
#endif
}
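Note: getMaxDepth() used above is not shown in this listing; a plausible sketch, assuming it scans the 640x480 depth map for its largest value so the points can be normalised to 8 bit:

int getMaxDepth(const XnDepthPixel* pDepthMap)
{
	int maxValue = 0;
	for (unsigned int i = 0; i < 640 * 480; ++i) {
		if (pDepthMap[i] > maxValue)
			maxValue = pDepthMap[i]; // keep the largest measured depth value
	}
	return maxValue;
}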
Пример #25
0
cv::Point xncv::worldToProjective(const XnPoint3D& point, const xn::DepthGenerator& depth)
{
	XnPoint3D p;
	depth.ConvertRealWorldToProjective(1, &point, &p);
	return cv::Point(static_cast<int>(p.X), static_cast<int>(p.Y));
}
int main(int argc, char **argv) {
	nRetVal = XN_STATUS_OK;

	/* Initialise the context (camera data) */
	nRetVal = context.Init();
	checkError("Error initialising the context", nRetVal)?0:exit(-1);



	/* Create the depth generator */
	nRetVal = depth.Create(context);
	checkError("Error creating the depth generator", nRetVal)?0:exit(-1);

	/* Configure the depth generator */
	XnMapOutputMode outputModeDepth;
	outputModeDepth.nXRes = 640;
	outputModeDepth.nYRes = 480;
	outputModeDepth.nFPS = 30;
	nRetVal = depth.SetMapOutputMode(outputModeDepth);
	checkError("Error configuring the depth generator", nRetVal)?0:exit(-1);


	/* Create the image generator */
	nRetVal = image.Create(context);
	checkError("Error creating the image generator", nRetVal)?0:exit(-1);

	/* Configure the image generator */
	XnMapOutputMode outputModeImage;
	outputModeImage.nXRes = 640;
	outputModeImage.nYRes = 480;
	outputModeImage.nFPS = 30;
	nRetVal = image.SetMapOutputMode(outputModeImage);
	checkError("Error configuring the image generator", nRetVal)?0:exit(-1);

	/* Start the generators - full speed ahead! */
	nRetVal = context.StartGeneratingAll();
	checkError("Error starting the generators", nRetVal)?0:exit(-1);

	/* Initialise GLUT */
	glutInit(&argc, argv);
	glutInitDisplayMode(GLUT_RGBA | GLUT_DOUBLE);
	glutInitWindowSize(WINDOW_SIZE_X, WINDOW_SIZE_Y);
	glutInitWindowPosition(300,150);
	win = glutCreateWindow("kinect-head-tracking");
	glClearColor(0, 0, 0, 0.0); // background colour (black)
	glEnable(GL_DEPTH_TEST);          // enable depth testing
	glDepthFunc(GL_LEQUAL);
//	glEnable(GL_CULL_FACE);           // enable backface culling
//	glEnable(GL_ALPHA_TEST);
//	glAlphaFunc(GL_GEQUAL, 1);

	/* Textures */
	glGenTextures(1, &texture_rgb);
	glBindTexture(GL_TEXTURE_2D, texture_rgb);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP);


	glGenTextures(1, &texture_depth);
	glBindTexture(GL_TEXTURE_2D, texture_depth);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP);

	glutDisplayFunc(glut_display);
	glutIdleFunc(glut_idle);
	glutMouseFunc(glut_mouse);
	glutMotionFunc(glut_mouse_motion);
	glutKeyboardFunc(glut_keyboard);
	glutMainLoop();
	return 0;
}
Пример #27
0
int main(int argc, char **argv)
{
    XnStatus nRetVal = XN_STATUS_OK;
    xn::EnumerationErrors errors;
    
    if( USE_RECORED_DATA ){
        g_Context.Init();
        g_Context.OpenFileRecording(RECORD_FILE_PATH);
        xn::Player player;
        
        // Get the Player node
        nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_PLAYER, player);
        CHECK_RC(nRetVal, "Find player");
        
        LOG_D("PlaybackSpeed: %d", player.GetPlaybackSpeed());
        
        xn::NodeInfoList nodeList;
        player.EnumerateNodes(nodeList);
        for( xn::NodeInfoList::Iterator it = nodeList.Begin();
            it != nodeList.End(); ++it){
            
            if( (*it).GetDescription().Type == XN_NODE_TYPE_IMAGE ){
                nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_IMAGE, g_ImageGenerator);
                CHECK_RC(nRetVal, "Find image node");
                LOG_D("%s", "ImageGenerator created.");
            }
            else if( (*it).GetDescription().Type == XN_NODE_TYPE_DEPTH ){
                nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_DepthGenerator);
                CHECK_RC(nRetVal, "Find depth node");
                LOG_D("%s", "DepthGenerator created.");            
            }
            else{
                LOG_D("%s %s %s", ::xnProductionNodeTypeToString((*it).GetDescription().Type ),
                      (*it).GetInstanceName(),
                      (*it).GetDescription().strName);
            }
        }
    }
    else{
        LOG_I("Reading config from: '%s'", CONFIG_XML_PATH);
        
        nRetVal = g_Context.InitFromXmlFile(CONFIG_XML_PATH, g_scriptNode, &errors);
        if (nRetVal == XN_STATUS_NO_NODE_PRESENT){
            XnChar strError[1024];
            errors.ToString(strError, 1024);
            LOG_E("%s\n", strError);
            return (nRetVal);
        }
        else if (nRetVal != XN_STATUS_OK){
            LOG_E("Open failed: %s", xnGetStatusString(nRetVal));
            return (nRetVal);
        }
        
        nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_DepthGenerator);
        CHECK_RC(nRetVal,"No depth");
        
        // Create the ImageGenerator
        nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_IMAGE, g_ImageGenerator);
        CHECK_RC(nRetVal, "Find image generator");
        
    }
    // Get the UserGenerator
    nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_USER, g_UserGenerator);
    if(nRetVal!=XN_STATUS_OK){
        nRetVal = g_UserGenerator.Create(g_Context); 
        CHECK_RC(nRetVal, "Create user generator");
    }

    
    XnCallbackHandle hUserCallbacks, hCalibrationStart, hCalibrationComplete, hPoseDetected;
    if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_SKELETON)){
        LOG_E("%s", "Supplied user generator doesn't support skeleton");
        return 1;
    }
    nRetVal = g_UserGenerator.RegisterUserCallbacks(User_NewUser, User_LostUser, NULL, hUserCallbacks);
    CHECK_RC(nRetVal, "Register to user callbacks");

    g_SkeletonCap = g_UserGenerator.GetSkeletonCap();
    nRetVal = g_SkeletonCap.RegisterToCalibrationStart(UserCalibration_CalibrationStart, NULL, hCalibrationStart);
    CHECK_RC(nRetVal, "Register to calibration start");

    nRetVal = g_SkeletonCap.RegisterToCalibrationComplete(UserCalibration_CalibrationComplete, NULL, hCalibrationComplete);
    CHECK_RC(nRetVal, "Register to calibration complete");
    
    if (g_SkeletonCap.NeedPoseForCalibration()){
        g_bNeedPose = TRUE;
        if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION)){
            LOG_E("%s", "Pose required, but not supported");
            return 1;
        }
        nRetVal = g_UserGenerator.GetPoseDetectionCap().RegisterToPoseDetected(UserPose_PoseDetected, NULL, hPoseDetected);
        CHECK_RC(nRetVal, "Register to Pose Detected");
        g_SkeletonCap.GetCalibrationPose(g_strPose);
    }
    
    g_SkeletonCap.SetSkeletonProfile(XN_SKEL_PROFILE_ALL);
    
    nRetVal = g_Context.StartGeneratingAll();
    CHECK_RC(nRetVal, "StartGenerating");
    
    // Create the image buffer for display
    XnMapOutputMode mapMode;
    g_ImageGenerator.GetMapOutputMode(mapMode);
    g_rgbImage = cvCreateImage(cvSize(mapMode.nXRes, mapMode.nYRes), IPL_DEPTH_8U, 3);

    LOG_I("%s", "Starting to run");
    if(g_bNeedPose){
        LOG_I("%s", "Assume calibration pose");
    }

    xn::Recorder recorder;
    if( DO_RECORED && !USE_RECORED_DATA ){
        // Create the recorder
        LOG_I("%s", "Setup Recorder");
        nRetVal = recorder.Create(g_Context);
        CHECK_RC(nRetVal, "Create recorder");
        
        // Set the recording destination
        nRetVal = recorder.SetDestination(XN_RECORD_MEDIUM_FILE, RECORD_FILE_PATH);
        CHECK_RC(nRetVal, "Set recorder destination file");
        
        // Add the depth and RGB camera streams to the recording
        nRetVal = recorder.AddNodeToRecording(g_DepthGenerator, XN_CODEC_NULL);
        CHECK_RC(nRetVal, "Add depth node to recording");
        nRetVal = recorder.AddNodeToRecording(g_ImageGenerator, XN_CODEC_NULL);
        CHECK_RC(nRetVal, "Add image node to recording");
        
        LOG_I("%s", "Recorder setup done.");
    }

    while (!xnOSWasKeyboardHit())
    {
        g_Context.WaitOneUpdateAll(g_UserGenerator);
        if( DO_RECORED  && !USE_RECORED_DATA ){
            nRetVal = recorder.Record();
            CHECK_RC(nRetVal, "Record");
        }

        // Get the raw RGB camera image
        xn::ImageMetaData imageMetaData;
        g_ImageGenerator.GetMetaData(imageMetaData);
        // Copy the pixels into the IplImage buffer
        xnOSMemCopy(g_rgbImage->imageData, imageMetaData.RGB24Data(), g_rgbImage->imageSize);
        // Convert RGB (OpenNI) to BGR for OpenCV display
        cvCvtColor(g_rgbImage, g_rgbImage, CV_RGB2BGR);

        // Get the user-labeled pixels from the UserGenerator
        xn::SceneMetaData sceneMetaData;
        g_UserGenerator.GetUserPixels(0, sceneMetaData);
        
        XnUserID allUsers[MAX_NUM_USERS];
        XnUInt16 nUsers = MAX_NUM_USERS;
        g_UserGenerator.GetUsers(allUsers, nUsers);
        for (int i = 0; i < nUsers; i++) {
            
            // Is this user calibrated and being tracked?
            if (g_SkeletonCap.IsTracking(allUsers[i])) {
                // Draw the skeleton
                DrawSkelton(allUsers[i], i);
            }
        }
        
        // Show the image
        cvShowImage("User View", g_rgbImage);

        // Exit when ESC is pressed
        if (cvWaitKey(10) == 27) {
            break;
        }
    }

    if( !USE_RECORED_DATA ){
        g_scriptNode.Release();
    }
    g_DepthGenerator.Release();
    g_UserGenerator.Release();
    g_Context.Release();

    if (g_rgbImage != NULL) {
        cvReleaseImage(&g_rgbImage);
    }
    g_Context.Shutdown();

    return 0;
}
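
Note: the snippet above registers User_NewUser, User_LostUser, UserCalibration_CalibrationStart, UserCalibration_CalibrationComplete and UserPose_PoseDetected without showing them. Below is a minimal sketch of what such handlers typically look like in OpenNI 1.5-style code; it assumes the same g_UserGenerator, g_bNeedPose and g_strPose globals and is not this project's actual implementation.

// Hedged sketch (not from this project): typical OpenNI 1.5 handlers for the
// callbacks registered above, reusing the globals from the example.
void XN_CALLBACK_TYPE User_NewUser(xn::UserGenerator& generator, XnUserID nId, void* pCookie)
{
    // New user detected: wait for the calibration pose, or calibrate right away.
    if (g_bNeedPose)
        g_UserGenerator.GetPoseDetectionCap().StartPoseDetection(g_strPose, nId);
    else
        g_UserGenerator.GetSkeletonCap().RequestCalibration(nId, TRUE);
}

void XN_CALLBACK_TYPE User_LostUser(xn::UserGenerator& generator, XnUserID nId, void* pCookie)
{
    LOG_I("Lost user %d", nId);
}

void XN_CALLBACK_TYPE UserPose_PoseDetected(xn::PoseDetectionCapability& capability, const XnChar* strPose, XnUserID nId, void* pCookie)
{
    // Pose found: stop pose detection and request calibration.
    g_UserGenerator.GetPoseDetectionCap().StopPoseDetection(nId);
    g_UserGenerator.GetSkeletonCap().RequestCalibration(nId, TRUE);
}

void XN_CALLBACK_TYPE UserCalibration_CalibrationStart(xn::SkeletonCapability& capability, XnUserID nId, void* pCookie)
{
    LOG_I("Calibration started for user %d", nId);
}

void XN_CALLBACK_TYPE UserCalibration_CalibrationComplete(xn::SkeletonCapability& capability, XnUserID nId, XnCalibrationStatus eStatus, void* pCookie)
{
    if (eStatus == XN_CALIBRATION_STATUS_OK)
    {
        // Calibration succeeded: start tracking the skeleton.
        g_UserGenerator.GetSkeletonCap().StartTracking(nId);
    }
    else if (g_bNeedPose)
    {
        // Calibration failed: go back to pose detection.
        g_UserGenerator.GetPoseDetectionCap().StartPoseDetection(g_strPose, nId);
    }
    else
    {
        g_UserGenerator.GetSkeletonCap().RequestCalibration(nId, TRUE);
    }
}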
Example #28
0
// this function is called each frame
void glutDisplay (void)
{

	glClear (GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

	// Setup the OpenGL viewpoint
	glMatrixMode(GL_PROJECTION);
	glPushMatrix();
	glLoadIdentity();

	// Check if Registration is done for Depth and RGB Images - Brandyn, Sravanthi
	g_DepthGenerator.GetAlternativeViewPointCap().SetViewPoint(g_ImageGenerator);
//	g_DepthGenerator.GetAlternativeViewPointCap().ResetViewPoint();

	xn::SceneMetaData sceneMD;
	xn::DepthMetaData depthMD;
	xn::ImageMetaData imageMD;
	g_DepthGenerator.GetMetaData(depthMD);
	g_ImageGenerator.GetMetaData(imageMD);
	#ifdef USE_GLUT
	glOrtho(0, depthMD.XRes(), depthMD.YRes(), 0, -1.0, 1.0);
	#else
	glOrthof(0, depthMD.XRes(), depthMD.YRes(), 0, -1.0, 1.0);
	#endif

	glDisable(GL_TEXTURE_2D);

	if (!g_bPause)
	{
		// Read next available data
		g_Context.WaitAndUpdateAll();
	}

	// Process the data
	// DRAW
	// Check if Registration is done for Depth and RGB Images - Brandyn, Sravanthi
	g_DepthGenerator.GetAlternativeViewPointCap().SetViewPoint(g_ImageGenerator);
	// g_DepthGenerator.GetAlternativeViewPointCap().ResetViewPoint();

	g_DepthGenerator.GetMetaData(depthMD);
	g_ImageGenerator.GetMetaData(imageMD);
	g_UserGenerator.GetUserPixels(0, sceneMD);

	DrawDepthMap(depthMD, imageMD, sceneMD, g_nPlayer);

	if (g_nPlayer != 0)
	{
		XnPoint3D com;
		g_UserGenerator.GetCoM(g_nPlayer, com);
		if (com.Z == 0)
		{
			g_nPlayer = 0;
			FindPlayer();
		}
	}

	#ifdef USE_GLUT
	glutSwapBuffers();
	#endif
}
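
Note: the snippet above re-applies SetViewPoint on every frame. Registration of the depth map onto the RGB image only needs to be enabled once, and only when the capability is available. A minimal sketch, assuming the same g_DepthGenerator and g_ImageGenerator globals:

// One-time depth/RGB registration, done outside the render loop.
if (g_DepthGenerator.IsCapabilitySupported(XN_CAPABILITY_ALTERNATIVE_VIEW_POINT))
{
	XnStatus rc = g_DepthGenerator.GetAlternativeViewPointCap().SetViewPoint(g_ImageGenerator);
	if (rc != XN_STATUS_OK)
		printf("SetViewPoint failed: %s\n", xnGetStatusString(rc));
}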
Example #29
0
int main(int argc, char **argv)
{
    XnStatus nRetVal = XN_STATUS_OK;
    xn::EnumerationErrors errors;
    TotalFrames = 0;
    static struct rusage ru;
    long double t;
    double w;
    struct timeval timStart;
    struct timeval timEnd;
    struct timeval Wall;
    bool Sample = true;
    XnFPSData xnFPS;
    XnUserID aUsers[MAX_NUM_USERS];
    XnUInt16 nUsers;
    XnSkeletonJointTransformation torsoJoint;

    if (argc > 1)
    {
        //parse the cmd line
        if (strcasecmp(argv[1],"--help") == 0)
        {
            PrintHelpHeader(argv[0]);
            return 1;
        }
        numOfUser = atoi(argv[1]);
        if(numOfUser == 0)
        {
            PrintHelpHeader(argv[0]);
            return 1;
        }
        else if(numOfUser > 2)
        {
            printf("Maximal Users allowed is 2\n");
            return 1;
        }
    }
    else
    {
        numOfUser = 4;
    }

    const char *fn = NULL;
    if    (fileExists(SAMPLE_XML_PATH)) fn = SAMPLE_XML_PATH;
    else if (fileExists(SAMPLE_XML_PATH_LOCAL)) fn = SAMPLE_XML_PATH_LOCAL;
    else {
        printf("Could not find '%s' nor '%s'. Aborting.\n" , SAMPLE_XML_PATH, SAMPLE_XML_PATH_LOCAL);
        return XN_STATUS_ERROR;
    }
    printf("Reading config from: '%s'\n", fn);

    nRetVal = g_Context.InitFromXmlFile(fn, g_scriptNode, &errors);
    if (nRetVal == XN_STATUS_NO_NODE_PRESENT)
    {
        XnChar strError[1024];
        errors.ToString(strError, 1024);
        printf("%s\n", strError);
        return (nRetVal);
    }
    else if (nRetVal != XN_STATUS_OK)
    {
        printf("Open failed: %s\n", xnGetStatusString(nRetVal));
        return (nRetVal);
    }

    nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_DepthGenerator);
    CHECK_RC(nRetVal,"No depth");

#if (XN_PLATFORM != XN_PLATFORM_MACOSX)
    // we want the benchmark application to run on only one CPU core
    cpu_set_t mask;
    CPU_ZERO(&mask);
    CPU_SET(1,&mask);
    sched_setaffinity(0,sizeof(mask),&mask);
#endif
    //initialize the FPS calculator
    nRetVal = xnFPSInit(&xnFPS, 90);
    CHECK_RC(nRetVal, "FPS Init");
    //ensure the User generator exists
    nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_USER, g_UserGenerator);
    if (nRetVal != XN_STATUS_OK)
    {
        nRetVal = g_UserGenerator.Create(g_Context);
        CHECK_RC(nRetVal, "Find user generator");
    }
    //register to generators callbacks
    XnCallbackHandle hUserCallbacks, hCalibrationStart, hCalibrationComplete, hPoseDetected;
    if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_SKELETON))
    {
        printf("Supplied user generator doesn't support skeleton\n");
        return 1;
    }
    nRetVal = g_UserGenerator.RegisterUserCallbacks(User_NewUser, User_LostUser, NULL, hUserCallbacks);
    CHECK_RC(nRetVal, "Register to user callbacks");
    nRetVal = g_UserGenerator.GetSkeletonCap().RegisterToCalibrationStart(UserCalibration_CalibrationStart, NULL, hCalibrationStart);
    CHECK_RC(nRetVal, "Register to calibration start");
    nRetVal = g_UserGenerator.GetSkeletonCap().RegisterToCalibrationComplete(UserCalibration_CalibrationComplete, NULL, hCalibrationComplete);
    CHECK_RC(nRetVal, "Register to calibration complete");
    //ensure calibration
    if (g_UserGenerator.GetSkeletonCap().NeedPoseForCalibration())
    {
        g_bNeedPose = TRUE;
        if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION))
        {
            printf("Pose required, but not supported\n");
            return 1;
        }
        nRetVal = g_UserGenerator.GetPoseDetectionCap().RegisterToPoseDetected(UserPose_PoseDetected, NULL, hPoseDetected);
        CHECK_RC(nRetVal, "Register to Pose Detected");
        g_UserGenerator.GetSkeletonCap().GetCalibrationPose(g_strPose);
    }
    //set skeleton profile (all joints)
    g_UserGenerator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);
    //start to generate
    nRetVal = g_Context.StartGeneratingAll();
    CHECK_RC(nRetVal, "StartGenerating");



    printf("%c[%d;%d;%dmPrimeSense Skeleton Benchmark Application\n ", 0x1B, BRIGHT,BLUE,BG_BLACK);
    printf("%c[%d;%d;%dmSet Maximal users to %d ", 0x1B, BRIGHT,BLUE,BG_BLACK,numOfUser);
	printf("%c[%dm\n", 0x1B, 0);
    printf("Starting to run\n");
    if(g_bNeedPose)
    {
        printf("Assume calibration pose\n");
    }

    XnUInt32 epochTime = 0;
    //every 30 frames (about 1 second) we sample the CPU resource usage
    while (!xnOSWasKeyboardHit())
    {
    	if (Sample)
    	{	//get the beginning sample of CPU resources
    		getrusage(RUSAGE_SELF, &ru);
    		timStart=ru.ru_utime;
    		t=(double)timStart.tv_sec * 1000000.0 + (double)timStart.tv_usec \
    		   + (double)ru.ru_stime.tv_sec*1000000.0+(double)ru.ru_stime.tv_usec;
    		//get the wall clock time
    		gettimeofday(&Wall,NULL);

    		w=(double)Wall.tv_sec * 1000000.0 + (double)Wall.tv_usec;
    		Sample = false;
    	}
    	g_Context.WaitOneUpdateAll(g_UserGenerator);
    	xnFPSMarkFrame(&xnFPS);
        // once per second (every 30 frames), print the torso position of each tracked user to keep printf CPU overhead low
        if(TotalFrames % 30 == 0)
        {
			nUsers=MAX_NUM_USERS;
			g_UserGenerator.GetUsers(aUsers, nUsers);
			int numTracked=0;
			int userToPrint=-1;
			for(XnUInt16 i=0; i<nUsers; i++)
			{
				if(g_UserGenerator.GetSkeletonCap().IsTracking(aUsers[i])==FALSE)
					continue;

				g_UserGenerator.GetSkeletonCap().GetSkeletonJoint(aUsers[i],XN_SKEL_TORSO,torsoJoint);
				printf("User %d Located At distance of %6.2f mm from the sensor\n",aUsers[i],torsoJoint.position.position.Z);

			 }
			//get the finish sample of the CPU resources
			getrusage(RUSAGE_SELF, &ru);
			timEnd=ru.ru_utime;
			t = (double)timEnd.tv_sec * 1000000.0 + (double)timEnd.tv_usec \
				+	(double)ru.ru_stime.tv_sec*1000000.0+(double)ru.ru_stime.tv_usec	- t;
			//get the wall clock
			gettimeofday(&Wall,NULL);

			w = (double)Wall.tv_sec * 1000000.0 + (double)Wall.tv_usec - w;

			XnDouble fps=xnFPSCalc(&xnFPS);
			//print CPU utilization and FPS
			printf("%c[%d;%d;%dmCPU Utilization=%3.2f%%\t", 0x1B, BRIGHT,RED,BG_BLACK,(double)100*t/w);
			printf("%c[%d;%d;%dmFPS=%3.2f ", 0x1B, BRIGHT,RED,BG_BLACK,(double)fps);
			printf("%c[%dm\n", 0x1B, 0);
			Sample= true;

		}
        TotalFrames++;
        
    }
    g_scriptNode.Release();
    g_DepthGenerator.Release();
    g_UserGenerator.Release();
    g_Context.Release();

}
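
Note: the CHECK_RC macro used throughout these examples is not defined in any of the snippets. The OpenNI samples ship a macro along these lines, which is what is assumed here:

// Assumed definition, modeled on the macro shipped with the OpenNI samples.
#define CHECK_RC(nRetVal, what)                                         \
    if (nRetVal != XN_STATUS_OK)                                        \
    {                                                                   \
        printf("%s failed: %s\n", what, xnGetStatusString(nRetVal));    \
        return nRetVal;                                                 \
    }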
Example #30
0
int main(int argc, char **argv)
{
	XnStatus rc = XN_STATUS_OK;

	rc = g_Context.InitFromXmlFile(SAMPLE_XML_PATH);
	CHECK_RC(rc, "InitFromXml");

	rc = g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_DepthGenerator);
	CHECK_RC(rc, "Find depth generator");
	rc = g_Context.FindExistingNode(XN_NODE_TYPE_IMAGE, g_ImageGenerator);
	CHECK_RC(rc, "Find Image generator");
	rc = g_Context.FindExistingNode(XN_NODE_TYPE_USER, g_UserGenerator);
	CHECK_RC(rc, "Find user generator");

	// Check if Registration is done for Depth and RGB Images - Brandyn, Sravanthi
	g_DepthGenerator.GetAlternativeViewPointCap().SetViewPoint(g_ImageGenerator);
	//g_DepthGenerator.GetAlternativeViewPointCap().ResetViewPoint();

	if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_SKELETON) ||
		!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION))
	{
		printf("User generator doesn't support either skeleton or pose detection.\n");
		return XN_STATUS_ERROR;
	}

	g_UserGenerator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);

	rc = g_Context.StartGeneratingAll();
	CHECK_RC(rc, "StartGenerating");

	XnCallbackHandle hUserCBs, hCalibrationCBs, hPoseCBs;
	g_UserGenerator.RegisterUserCallbacks(NewUser, LostUser, NULL, hUserCBs);
	g_UserGenerator.GetSkeletonCap().RegisterCalibrationCallbacks(CalibrationStarted, CalibrationEnded, NULL, hCalibrationCBs);
	g_UserGenerator.GetPoseDetectionCap().RegisterToPoseCallbacks(PoseDetected, NULL, NULL, hPoseCBs);


	#ifdef USE_GLUT
	
	// Check if Registration is done for Depth and RGB Images - Brandyn, Sravanthi
	g_DepthGenerator.GetAlternativeViewPointCap().SetViewPoint(g_ImageGenerator);
	//g_DepthGenerator.GetAlternativeViewPointCap().ResetViewPoint();

	glInit(&argc, argv);
	glutMainLoop();

	#else

	if (!opengles_init(GL_WIN_SIZE_X, GL_WIN_SIZE_Y, &display, &surface, &context))
	{
		printf("Error initing opengles\n");
		CleanupExit();
	}

	glDisable(GL_DEPTH_TEST);
//	glEnable(GL_TEXTURE_2D);
	glEnableClientState(GL_VERTEX_ARRAY);
	glDisableClientState(GL_COLOR_ARRAY);

	while ((!_kbhit()) && (!g_bQuit))
	{
		glutDisplay();
		eglSwapBuffers(display, surface);
	}

	opengles_shutdown(display, surface, context);

	CleanupExit();

	#endif
}
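
Note: this snippet uses the older RegisterCalibrationCallbacks / RegisterToPoseCallbacks entry points, whose handler signatures differ from the RegisterToCalibrationStart/Complete style used in the previous examples. Below is a minimal sketch of matching handlers, assuming the standard "Psi" calibration pose and the same g_UserGenerator global; the project's real handlers may differ.

// Hedged sketch of handlers matching the legacy registration calls above.
void XN_CALLBACK_TYPE NewUser(xn::UserGenerator& generator, XnUserID nId, void* pCookie)
{
	// "Psi" is the conventional OpenNI calibration pose name (assumed here).
	g_UserGenerator.GetPoseDetectionCap().StartPoseDetection("Psi", nId);
}

void XN_CALLBACK_TYPE LostUser(xn::UserGenerator& generator, XnUserID nId, void* pCookie)
{
	printf("Lost user %d\n", nId);
}

void XN_CALLBACK_TYPE PoseDetected(xn::PoseDetectionCapability& pose, const XnChar* strPose, XnUserID nId, void* pCookie)
{
	// Pose found: switch from pose detection to calibration.
	g_UserGenerator.GetPoseDetectionCap().StopPoseDetection(nId);
	g_UserGenerator.GetSkeletonCap().RequestCalibration(nId, TRUE);
}

void XN_CALLBACK_TYPE CalibrationStarted(xn::SkeletonCapability& skeleton, XnUserID nId, void* pCookie)
{
	printf("Calibration started for user %d\n", nId);
}

void XN_CALLBACK_TYPE CalibrationEnded(xn::SkeletonCapability& skeleton, XnUserID nId, XnBool bSuccess, void* pCookie)
{
	if (bSuccess)
		g_UserGenerator.GetSkeletonCap().StartTracking(nId);
	else
		g_UserGenerator.GetPoseDetectionCap().StartPoseDetection("Psi", nId);
}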