Example #1
0
void XnSensorFPS::Mark(XnFPSData* pFPS, const XnChar* csName, XnUInt32 nFrameID, XnUInt64 nTS)
{
	// Nothing to do unless verbose FPS logging is active for this mask.
	if (!xnLogIsEnabled(XN_MASK_SENSOR_FPS, XN_LOG_VERBOSE))
		return;

	// Sample the high-resolution clock (microseconds).
	XnUInt64 nTimestamp;
	xnOSGetHighResTimeStamp(&nTimestamp);

	// Record this frame in the stream's FPS calculator.
	xnFPSMarkFrame(pFPS, nTimestamp);

	// Trace the raw frame event: arrival time, stream name, frame ID, sensor timestamp.
	xnDumpWriteString(m_FramesDump, "%llu,%s,%u,%llu\n", nTimestamp, csName, nFrameID, nTS);

	// Collapse the microsecond clock to whole seconds so the summary below
	// is printed at most once per second.
	const XnUInt64 nNowSec = nTimestamp / 1000000;

	if (nNowSec != m_nLastPrint)
	{
		m_nLastPrint = nNowSec;
		xnLogVerbose(XN_MASK_SENSOR_FPS, "[FPS] InputFrames - I: %5.2f, D: %5.2f, OutputFrames - I: %5.2f, D: %5.2f",
			xnFPSCalc(&m_InputImage), xnFPSCalc(&m_InputDepth), xnFPSCalc(&m_OutputImage), xnFPSCalc(&m_OutputDepth));
	}
}
Example #2
0
// Skeleton-tracking benchmark: tracks up to numOfUser users and, once per 30
// frames, prints the torso distance of every tracked user together with the
// process CPU utilization (via getrusage) and the measured FPS.
// Returns 0 on success, 1 on usage errors, or an XnStatus code on OpenNI failure.
int main(int argc, char **argv)
{
    XnStatus nRetVal = XN_STATUS_OK;
    xn::EnumerationErrors errors;
    TotalFrames = 0;
    static struct rusage ru;
    // BUGFIX: t was 'long double'; (double)100*t/w then promoted to long double
    // and was passed to printf's %3.2f (which expects double) - undefined behavior.
    double t;                   // process CPU time (user + system), microseconds
    double w;                   // wall-clock time over the same interval, microseconds
    struct timeval timStart;
    struct timeval timEnd;
    struct timeval Wall;
    bool Sample = true;         // true when the next loop iteration should take a fresh sample
    XnFPSData xnFPS;
    XnUserID aUsers[MAX_NUM_USERS];
    XnUInt16 nUsers;
    XnSkeletonJointTransformation torsoJoint;

    // Parse the command line: optional first argument is the maximal number of users.
    if (argc > 1)
    {
        if (strcasecmp(argv[1],"--help") == 0)
        {
            PrintHelpHeader(argv[0]);
            return 1;
        }
        numOfUser = atoi(argv[1]);
        if(numOfUser == 0)
        {
            // atoi() yields 0 both for an explicit "0" and for non-numeric input.
            PrintHelpHeader(argv[0]);
            return 1;
        }
        else if(numOfUser > 2)
        {
            printf("Maximal Users allowed is 2\n");
            return 1;
        }
    }
    else
    {
        // NOTE(review): the default of 4 exceeds the command-line maximum of 2
        // enforced above - confirm whether this is intentional.
        numOfUser = 4;
    }

    // Locate the configuration XML (installed path first, then local fallback).
    const char *fn = NULL;
    if    (fileExists(SAMPLE_XML_PATH)) fn = SAMPLE_XML_PATH;
    else if (fileExists(SAMPLE_XML_PATH_LOCAL)) fn = SAMPLE_XML_PATH_LOCAL;
    else {
        printf("Could not find '%s' nor '%s'. Aborting.\n" , SAMPLE_XML_PATH, SAMPLE_XML_PATH_LOCAL);
        return XN_STATUS_ERROR;
    }
    printf("Reading config from: '%s'\n", fn);

    nRetVal = g_Context.InitFromXmlFile(fn, g_scriptNode, &errors);
    if (nRetVal == XN_STATUS_NO_NODE_PRESENT)
    {
        XnChar strError[1024];
        errors.ToString(strError, 1024);
        printf("%s\n", strError);
        return (nRetVal);
    }
    else if (nRetVal != XN_STATUS_OK)
    {
        printf("Open failed: %s\n", xnGetStatusString(nRetVal));
        return (nRetVal);
    }

    nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_DepthGenerator);
    CHECK_RC(nRetVal,"No depth");

#if (XN_PLATFORM != XN_PLATFORM_MACOSX)
    // Pin the benchmark to a single CPU core so utilization numbers are meaningful.
    cpu_set_t mask;
    CPU_ZERO(&mask);
    CPU_SET(1,&mask);
    sched_setaffinity(0,sizeof(mask),&mask);
#endif
    // Initialize the FPS calculator (90-frame sliding window).
    nRetVal = xnFPSInit(&xnFPS, 90);
    CHECK_RC(nRetVal, "FPS Init");

    // Ensure a user generator exists, creating one if the XML did not declare it.
    nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_USER, g_UserGenerator);
    if (nRetVal != XN_STATUS_OK)
    {
        nRetVal = g_UserGenerator.Create(g_Context);
        // BUGFIX: error message said "Find user generator" for a Create() call.
        CHECK_RC(nRetVal, "Create user generator");
    }

    // Register to generator callbacks.
    XnCallbackHandle hUserCallbacks, hCalibrationStart, hCalibrationComplete, hPoseDetected;
    if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_SKELETON))
    {
        printf("Supplied user generator doesn't support skeleton\n");
        return 1;
    }
    nRetVal = g_UserGenerator.RegisterUserCallbacks(User_NewUser, User_LostUser, NULL, hUserCallbacks);
    CHECK_RC(nRetVal, "Register to user callbacks");
    nRetVal = g_UserGenerator.GetSkeletonCap().RegisterToCalibrationStart(UserCalibration_CalibrationStart, NULL, hCalibrationStart);
    CHECK_RC(nRetVal, "Register to calibration start");
    nRetVal = g_UserGenerator.GetSkeletonCap().RegisterToCalibrationComplete(UserCalibration_CalibrationComplete, NULL, hCalibrationComplete);
    CHECK_RC(nRetVal, "Register to calibration complete");

    // If the skeleton needs a pose for calibration, pose detection must be supported.
    if (g_UserGenerator.GetSkeletonCap().NeedPoseForCalibration())
    {
        g_bNeedPose = TRUE;
        if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION))
        {
            printf("Pose required, but not supported\n");
            return 1;
        }
        nRetVal = g_UserGenerator.GetPoseDetectionCap().RegisterToPoseDetected(UserPose_PoseDetected, NULL, hPoseDetected);
        CHECK_RC(nRetVal, "Register to Pose Detected");
        g_UserGenerator.GetSkeletonCap().GetCalibrationPose(g_strPose);
    }

    // Set skeleton profile (all joints) and start generating.
    g_UserGenerator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);
    nRetVal = g_Context.StartGeneratingAll();
    CHECK_RC(nRetVal, "StartGenerating");

    printf("%c[%d;%d;%dmPrimeSense Skeleton Benchmark Application\n ", 0x1B, BRIGHT,BLUE,BG_BLACK);
    printf("%c[%d;%d;%dmSet Maximal users to %d ", 0x1B, BRIGHT,BLUE,BG_BLACK,numOfUser);
    printf("%c[%dm\n", 0x1B, 0);
    printf("Starting to run\n");
    if(g_bNeedPose)
    {
        printf("Assume calibration pose\n");
    }

    // Every 30 frames (~1 second) sample and report CPU resource usage.
    while (!xnOSWasKeyboardHit())
    {
        if (Sample)
        {
            // Take the beginning sample: CPU (user + system) and wall-clock time.
            getrusage(RUSAGE_SELF, &ru);
            timStart=ru.ru_utime;
            t=(double)timStart.tv_sec * 1000000.0 + (double)timStart.tv_usec \
               + (double)ru.ru_stime.tv_sec*1000000.0+(double)ru.ru_stime.tv_usec;
            gettimeofday(&Wall,NULL);
            w=(double)Wall.tv_sec * 1000000.0 + (double)Wall.tv_usec;
            Sample = false;
        }
        g_Context.WaitOneUpdateAll(g_UserGenerator);
        xnFPSMarkFrame(&xnFPS);
        // Print the torso information for tracked users only once per 30 frames
        // to keep printf overhead out of the CPU measurement.
        if(TotalFrames % 30 == 0)
        {
            nUsers=MAX_NUM_USERS;
            g_UserGenerator.GetUsers(aUsers, nUsers);
            for(XnUInt16 i=0; i<nUsers; i++)
            {
                if(g_UserGenerator.GetSkeletonCap().IsTracking(aUsers[i])==FALSE)
                    continue;

                g_UserGenerator.GetSkeletonCap().GetSkeletonJoint(aUsers[i],XN_SKEL_TORSO,torsoJoint);
                printf("User %d Located At distance of %6.2f mm from the sensor\n",aUsers[i],torsoJoint.position.position.Z);
            }
            // Take the finishing sample and compute the deltas.
            getrusage(RUSAGE_SELF, &ru);
            timEnd=ru.ru_utime;
            t = (double)timEnd.tv_sec * 1000000.0 + (double)timEnd.tv_usec \
                +	(double)ru.ru_stime.tv_sec*1000000.0+(double)ru.ru_stime.tv_usec	- t;
            gettimeofday(&Wall,NULL);
            w = (double)Wall.tv_sec * 1000000.0 + (double)Wall.tv_usec - w;

            XnDouble fps=xnFPSCalc(&xnFPS);
            // CPU utilization is CPU-time / wall-time over the sampled interval.
            printf("%c[%d;%d;%dmCPU Utilization=%3.2f%%\t", 0x1B, BRIGHT,RED,BG_BLACK,(double)100*t/w);
            printf("%c[%d;%d;%dmFPS=%3.2f ", 0x1B, BRIGHT,RED,BG_BLACK,(double)fps);
            printf("%c[%dm\n", 0x1B, 0);
            Sample= true;
        }
        TotalFrames++;
    }

    // Cleanup: free the FPS calculator (BUGFIX: was leaked) and release all nodes.
    xnFPSFree(&xnFPS);
    g_scriptNode.Release();
    g_DepthGenerator.Release();
    g_UserGenerator.Release();
    g_Context.Release();

    return 0;
}
Example #3
0
// Fills the DirectShow media sample with the next image frame from the
// OpenNI image generator, converting RGB24 to BGR (optionally flipped) or
// copying MJPEG data as-is. Returns NOERROR on success, E_UNEXPECTED on
// generator/stream errors or when the sample buffer is too small.
HRESULT XnVideoStream::FillBuffer(IMediaSample *pms)
{
	XN_METHOD_START;

	XN_METHOD_CHECK_POINTER(pms);

	if (!m_imageGen.IsGenerating())
	{
		XN_METHOD_RETURN(E_UNEXPECTED);
	}

	// A positive biHeight means a bottom-up DIB, i.e. the output must be flipped.
	VIDEOINFOHEADER* videoInfo = (VIDEOINFOHEADER*)m_mt.Format();
	bool bUpsideDown = videoInfo->bmiHeader.biHeight > 0;

	if (m_bFlipVertically)
	{
		bUpsideDown = !bUpsideDown;
	}

	BYTE *pData;
	long lDataLen;

	pms->GetPointer(&pData);
	lDataLen = pms->GetSize();

	{
		CAutoLock cAutoLock(m_pFilter->pStateLock());

		XnStatus nRetVal = XN_STATUS_OK;

		// ignore timeouts
		for(;;)
		{
			nRetVal = m_imageGen.WaitAndUpdateData();
			if (nRetVal != XN_STATUS_WAIT_DATA_TIMEOUT)
			{
				break;
			}
			else
			{
				xnDumpFileWriteString(m_Dump, "\tTimeout during FillBuffer\n");
			}
		}

		if (nRetVal != XN_STATUS_OK) XN_METHOD_RETURN(E_UNEXPECTED);
	}

	xn::ImageMetaData imageMD;
	m_imageGen.GetMetaData(imageMD);

	if (imageMD.PixelFormat() == XN_PIXEL_FORMAT_RGB24)
	{
		// BUGFIX: lDataLen was fetched but never checked - writing XRes*YRes*3
		// bytes into a smaller sample buffer would overrun it.
		const long lNeeded = (long)(imageMD.XRes() * imageMD.YRes() * 3);
		if (lDataLen < lNeeded)
		{
			xnLogError(XN_MASK_FILTER, "Media sample buffer is too small!");
			XN_METHOD_RETURN(E_UNEXPECTED);
		}

		const XnRGB24Pixel* pImage = imageMD.RGB24Data();
		if (bUpsideDown)
		{
			// Walk the source image backwards from its last pixel.
			// NOTE(review): decrementing across the full image reverses both axes
			// (a 180-degree rotation), not just a vertical flip as the original
			// comment claimed - confirm this mirroring is intended.
			pImage += imageMD.XRes() * imageMD.YRes() - 1;

			for (XnUInt32 y = 0; y < imageMD.YRes(); ++y)
			{
				for (XnUInt32 x = 0; x < imageMD.XRes(); ++x, pImage -=1, pData += 3)
				{
					// translate RGB to BGR
					pData[0] = pImage->nBlue;
					pData[1] = pImage->nGreen;
					pData[2] = pImage->nRed;
				}
			}
		}
		else
		{
			for (XnUInt32 y = 0; y < imageMD.YRes(); ++y)
			{
				for (XnUInt32 x = 0; x < imageMD.XRes(); ++x, pImage += 1, pData += 3)
				{
					// translate RGB to BGR
					pData[0] = pImage->nBlue;
					pData[1] = pImage->nGreen;
					pData[2] = pImage->nRed;
				}
			}
		}

		// Report the number of bytes actually written (consistent with the MJPEG path).
		pms->SetActualDataLength(lNeeded);
	}
	else if (imageMD.PixelFormat() == XN_PIXEL_FORMAT_MJPEG)
	{
		// BUGFIX: guard the memcpy against a sample buffer smaller than the frame.
		if ((long)imageMD.DataSize() > lDataLen)
		{
			xnLogError(XN_MASK_FILTER, "Media sample buffer is too small!");
			XN_METHOD_RETURN(E_UNEXPECTED);
		}
		memcpy(pData, imageMD.Data(), imageMD.DataSize());
		pms->SetActualDataLength(imageMD.DataSize());
	}
	else
	{
		xnLogError(XN_MASK_FILTER, "Unsupported pixel format!");
		XN_METHOD_RETURN(E_UNEXPECTED);
	}

	// The current time is the sample's start
//		CRefTime rtStart = m_rtSampleTime;

	// Increment to find the finish time
//		m_rtSampleTime += (LONG)m_iRepeatTime;

//		pms->SetTime((REFERENCE_TIME *) &rtStart,(REFERENCE_TIME *) &m_rtSampleTime);

	// Every frame from a camera is a keyframe.
	pms->SetSyncPoint(TRUE);

	xnFPSMarkFrame(&m_FPS);

	XN_METHOD_RETURN(NOERROR);
}