Example #1
//---------------------------------------------------------------------------
// Code
//---------------------------------------------------------------------------
XnSensorFPS::XnSensorFPS() :
	m_FramesDump(XN_DUMP_CLOSED)
{
	xnFPSInit(&m_InputDepth, XN_SENSOR_FPS_FRAME_COUNT);
	xnFPSInit(&m_InputImage, XN_SENSOR_FPS_FRAME_COUNT);
	xnFPSInit(&m_ReadCalls, XN_SENSOR_FPS_FRAME_COUNT);
	xnFPSInit(&m_OutputDepth, XN_SENSOR_FPS_FRAME_COUNT);
	xnFPSInit(&m_OutputImage, XN_SENSOR_FPS_FRAME_COUNT);

	xnDumpInit(&m_FramesDump, XN_MASK_SENSOR_FPS, "TS,Type,FrameID,FrameTS\n", "FramesTimes.csv");
}
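
The matching teardown is not shown in this example; below is a minimal sketch, assuming the destructor simply mirrors the constructor with xnFPSFree() (the counterpart of xnFPSInit()) and xnDumpClose().

// Hypothetical destructor (an assumption, not part of the example above):
// release every FPS tracker initialized with xnFPSInit() and close the dump.
XnSensorFPS::~XnSensorFPS()
{
	xnFPSFree(&m_InputDepth);
	xnFPSFree(&m_InputImage);
	xnFPSFree(&m_ReadCalls);
	xnFPSFree(&m_OutputDepth);
	xnFPSFree(&m_OutputImage);

	xnDumpClose(&m_FramesDump);
}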
Example #2
bool CinderOpenNISkeleton::setupFromXML(string path)
{
	XnStatus nRetVal = XN_STATUS_OK;
	xn::EnumerationErrors errors;

	std::cout << "Path: " << path.c_str() << std::endl;
	nRetVal = gCinderOpenNISkeleton->mContext.InitFromXmlFile(path.c_str(), &errors);

	// Check the XML initialization result before nRetVal is reused below
	if (nRetVal == XN_STATUS_NO_NODE_PRESENT) {
		XnChar strError[1024];
		errors.ToString(strError, 1024);
		app::console() << strError << endl;
		return false;
	}
	else if (nRetVal != XN_STATUS_OK)
	{
		app::console() << "Open failed: " << xnGetStatusString(nRetVal) << endl;
		return false;
	}

	nRetVal = xnFPSInit(&gCinderOpenNISkeleton->xnFPS, 180);
	CHECK_RC(nRetVal, "FPS Init", true);

	// Output device production nodes (user, depth, etc.)
	gCinderOpenNISkeleton->debugOutputNodeTypes();


	// Find depth generator
	nRetVal = gCinderOpenNISkeleton->mContext.FindExistingNode(XN_NODE_TYPE_DEPTH, gCinderOpenNISkeleton->mDepthGenerator);
	CHECK_RC(nRetVal, "Find depth generator", true);

	// Find skeleton / user generator
	nRetVal = gCinderOpenNISkeleton->mContext.FindExistingNode(XN_NODE_TYPE_USER, gCinderOpenNISkeleton->mUserGenerator);
	if (nRetVal != XN_STATUS_OK)
	{
		// Create one
		nRetVal = gCinderOpenNISkeleton->mUserGenerator.Create(gCinderOpenNISkeleton->mContext);
		CHECK_RC(nRetVal, "Find user generator", true);
		return false;
	}

	// Check if user generator can detect skeleton
	if (!gCinderOpenNISkeleton->mUserGenerator.IsCapabilitySupported(XN_CAPABILITY_SKELETON)) {
		app::console() << "Supplied user generator doesn't support skeleton\n" << endl;
		return false;
	}

	// Register callbacks
	nRetVal = gCinderOpenNISkeleton->setupCallbacks();

	// Start generating
	nRetVal = gCinderOpenNISkeleton->mContext.StartGeneratingAll();
	CHECK_RC(nRetVal, "StartGenerating", true);

	return true;
}
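
CHECK_RC is not defined in any of these snippets. The OpenNI samples typically define it along the lines of the sketch below; the three-argument variant used by the Cinder wrapper above (with a trailing bool) is project-specific, so treat this as an assumption about what the macro does rather than the wrapper's actual definition.

// Typical CHECK_RC helper as found in the OpenNI sample code: report the
// failing step and bail out whenever a call did not return XN_STATUS_OK.
#define CHECK_RC(nRetVal, what)											\
	if (nRetVal != XN_STATUS_OK)										\
	{																	\
		printf("%s failed: %s\n", what, xnGetStatusString(nRetVal));	\
		return nRetVal;													\
	}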
Example #3
//---------------------------------------------------------------------------
// Code
//---------------------------------------------------------------------------
XnVideoStream::XnVideoStream(HRESULT *phr, XnVideoSource *pParent, xn::ImageGenerator& imageGen, LPCWSTR pPinName) :
	CSourceStream(NAME("Video Stream"), phr, pParent, pPinName),
	m_imageGen(imageGen),
	m_bFlipVertically(FALSE),
	m_nPreferredMode(-1),
	m_Dump(pParent->m_Dump)
{
	ASSERT(phr);

	xnFPSInit(&m_FPS, 90);

	XnUInt32 nSupportedModes = m_imageGen.GetSupportedMapOutputModesCount();
	XnMapOutputMode* aOutputModes = new XnMapOutputMode[nSupportedModes];

	XnStatus nRetVal = m_imageGen.GetSupportedMapOutputModes(aOutputModes, nSupportedModes);
	if (nRetVal != XN_STATUS_OK)
	{
		*phr = E_UNEXPECTED;
		delete[] aOutputModes;
		return;
	}

	nRetVal = m_aSupportedModes.Reserve(nSupportedModes);
	if (nRetVal != XN_STATUS_OK)
	{
		*phr = E_UNEXPECTED;
		delete[] aOutputModes;
		return;
	}

	XnBool bRGB = m_imageGen.IsPixelFormatSupported(XN_PIXEL_FORMAT_RGB24);
	XnBool bMJPEG = m_imageGen.IsPixelFormatSupported(XN_PIXEL_FORMAT_MJPEG);
	Mode mode;

	for (XnUInt32 i = 0; i < nSupportedModes; ++i)
	{
		mode.OutputMode = aOutputModes[i];
		if (bRGB)
		{
			mode.Format = XN_PIXEL_FORMAT_RGB24;
			m_aSupportedModes.AddLast(mode);
		}

		if (bMJPEG)
		{
			mode.Format = XN_PIXEL_FORMAT_MJPEG;
			m_aSupportedModes.AddLast(mode);
		}
	}

	delete[] aOutputModes;	// free the temporary mode list once its entries are copied

	CMediaType mediaType;
	GetMediaType(0, &mediaType);
	SetMediaType(&mediaType);
}
Example #4
// Barebones setup
bool CinderOpenNISkeleton::setup()
{
	XnStatus nRetVal = XN_STATUS_OK;
	xn::EnumerationErrors errors;

	// Init
	nRetVal = mContext.Init();
	CHECK_RC(nRetVal, "Init", true);

	nRetVal = xnFPSInit(&xnFPS, 180);
	CHECK_RC(nRetVal, "FPS Init", true);

    worldOffset.x = -160;
    worldOffset.y = -200;
    worldOffset.z = -2784;

	// Output device production nodes (user, depth, etc)
	debugOutputNodeTypes();

	_allUsers.clear();
	for(int i = 0; i < maxUsers; i++) {
		UserSkeleton emptySkeleton;
		emptySkeleton.isValid = false;
		emptySkeleton.id = currentUsers[i];

		// Create a bunch of spots to contain our vectors
		emptySkeleton.projectedPositions.clear();
		for( int j = 0; j <= XN_SKEL_RIGHT_FOOT+1; ++j) {
			emptySkeleton.projectedPositions.push_back( ci::Vec3f::zero() );
		}

		_allUsers.push_back( emptySkeleton );
	}

	return true;
}
Example #5
int main(int argc, char **argv)
{
    XnStatus nRetVal = XN_STATUS_OK;
    xn::EnumerationErrors errors;
    TotalFrames = 0;
    static struct rusage ru;
    long double t;
    double w;
    struct timeval timStart;
    struct timeval timEnd;
    struct timeval Wall;
    bool Sample = true;
    XnFPSData xnFPS;
    XnUserID aUsers[MAX_NUM_USERS];
	XnUInt16 nUsers;
	XnSkeletonJointTransformation torsoJoint;

    if (argc > 1)
	{

    	//parse the cmd line
    	if (strcasecmp(argv[1],"--help") == 0)
    	{
    		PrintHelpHeader(argv[0]);
    		return 1;
    	}
    	numOfUser = atoi(argv[1]);
    	if(numOfUser == 0)
    	{
    		PrintHelpHeader(argv[0]);
    		return 1;
    	}
    	else if(numOfUser > 2)
    	{
    		printf("Maximal Users allowed is 2\n");
    		return 1;
    	}

	}
    else
    {
    	numOfUser = 4;
    }

    const char *fn = NULL;
    if    (fileExists(SAMPLE_XML_PATH)) fn = SAMPLE_XML_PATH;
    else if (fileExists(SAMPLE_XML_PATH_LOCAL)) fn = SAMPLE_XML_PATH_LOCAL;
    else {
        printf("Could not find '%s' nor '%s'. Aborting.\n" , SAMPLE_XML_PATH, SAMPLE_XML_PATH_LOCAL);
        return XN_STATUS_ERROR;
    }
    printf("Reading config from: '%s'\n", fn);

    nRetVal = g_Context.InitFromXmlFile(fn, g_scriptNode, &errors);
    if (nRetVal == XN_STATUS_NO_NODE_PRESENT)
    {
        XnChar strError[1024];
        errors.ToString(strError, 1024);
        printf("%s\n", strError);
        return (nRetVal);
    }
    else if (nRetVal != XN_STATUS_OK)
    {
        printf("Open failed: %s\n", xnGetStatusString(nRetVal));
        return (nRetVal);
    }

    nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_DepthGenerator);
    CHECK_RC(nRetVal,"No depth");

#if (XN_PLATFORM != XN_PLATFORM_MACOSX)	
    //we want our benchmark application to run on only one CPU core
    cpu_set_t mask;
	CPU_ZERO(&mask);
	CPU_SET(1,&mask);
	sched_setaffinity(0,sizeof(mask),&mask);
#endif	
	//initialize the FPS calculator
	nRetVal = xnFPSInit(&xnFPS, 90);
	CHECK_RC(nRetVal, "FPS Init");
	//ensure the User generator exists
    nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_USER, g_UserGenerator);
    if (nRetVal != XN_STATUS_OK)
    {
        nRetVal = g_UserGenerator.Create(g_Context);
        CHECK_RC(nRetVal, "Find user generator");
    }
    //register to the generator's callbacks
    XnCallbackHandle hUserCallbacks, hCalibrationStart, hCalibrationComplete, hPoseDetected;
    if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_SKELETON))
    {
        printf("Supplied user generator doesn't support skeleton\n");
        return 1;
    }
    nRetVal = g_UserGenerator.RegisterUserCallbacks(User_NewUser, User_LostUser, NULL, hUserCallbacks);
    CHECK_RC(nRetVal, "Register to user callbacks");
    nRetVal = g_UserGenerator.GetSkeletonCap().RegisterToCalibrationStart(UserCalibration_CalibrationStart, NULL, hCalibrationStart);
    CHECK_RC(nRetVal, "Register to calibration start");
    nRetVal = g_UserGenerator.GetSkeletonCap().RegisterToCalibrationComplete(UserCalibration_CalibrationComplete, NULL, hCalibrationComplete);
    CHECK_RC(nRetVal, "Register to calibration complete");
    //check whether a calibration pose is required
    if (g_UserGenerator.GetSkeletonCap().NeedPoseForCalibration())
    {
        g_bNeedPose = TRUE;
        if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION))
        {
            printf("Pose required, but not supported\n");
            return 1;
        }
        nRetVal = g_UserGenerator.GetPoseDetectionCap().RegisterToPoseDetected(UserPose_PoseDetected, NULL, hPoseDetected);
        CHECK_RC(nRetVal, "Register to Pose Detected");
        g_UserGenerator.GetSkeletonCap().GetCalibrationPose(g_strPose);
    }
    //set skeleton profile (all joints)
    g_UserGenerator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);
    //start generating
    nRetVal = g_Context.StartGeneratingAll();
    CHECK_RC(nRetVal, "StartGenerating");



    printf("%c[%d;%d;%dmPrimeSense Skeleton Benchmark Application\n ", 0x1B, BRIGHT,BLUE,BG_BLACK);
    printf("%c[%d;%d;%dmSet Maximal users to %d ", 0x1B, BRIGHT,BLUE,BG_BLACK,numOfUser);
	printf("%c[%dm\n", 0x1B, 0);
    printf("Starting to run\n");
    if(g_bNeedPose)
    {
        printf("Assume calibration pose\n");
    }

    XnUInt32 epochTime = 0;
    //every 30 frames (~1 second) we sample the CPU resource usage
    while (!xnOSWasKeyboardHit())
    {
    	if (Sample)
    	{	//get the beginning sample of CPU resources
    		getrusage(RUSAGE_SELF, &ru);
    		timStart=ru.ru_utime;
    		t=(double)timStart.tv_sec * 1000000.0 + (double)timStart.tv_usec \
    		   + (double)ru.ru_stime.tv_sec*1000000.0+(double)ru.ru_stime.tv_usec;
    		//get the wall clock time
    		gettimeofday(&Wall,NULL);

    		w=(double)Wall.tv_sec * 1000000.0 + (double)Wall.tv_usec;
    		Sample = false;
    	}
    	g_Context.WaitOneUpdateAll(g_UserGenerator);
    	xnFPSMarkFrame(&xnFPS);
        // print the torso position of every tracked user once per second, so printf doesn't skew the CPU measurement
        if(TotalFrames % 30 == 0)
        {
			nUsers=MAX_NUM_USERS;
			g_UserGenerator.GetUsers(aUsers, nUsers);
			int numTracked=0;
			int userToPrint=-1;
			for(XnUInt16 i=0; i<nUsers; i++)
			{
				if(g_UserGenerator.GetSkeletonCap().IsTracking(aUsers[i])==FALSE)
					continue;

				g_UserGenerator.GetSkeletonCap().GetSkeletonJoint(aUsers[i],XN_SKEL_TORSO,torsoJoint);
				printf("User %d Located At distance of %6.2f mm from the sensor\n",aUsers[i],torsoJoint.position.position.Z);

			 }
			//get the finish sample of the CPU resources
			getrusage(RUSAGE_SELF, &ru);
			timEnd=ru.ru_utime;
			t = (double)timEnd.tv_sec * 1000000.0 + (double)timEnd.tv_usec \
				+	(double)ru.ru_stime.tv_sec*1000000.0+(double)ru.ru_stime.tv_usec	- t;
			//get the wall clock
			gettimeofday(&Wall,NULL);

			w = (double)Wall.tv_sec * 1000000.0 + (double)Wall.tv_usec - w;

			XnDouble fps=xnFPSCalc(&xnFPS);
			//print CPU utilization and FPS
			printf("%c[%d;%d;%dmCPU Utilization=%3.2f%%\t", 0x1B, BRIGHT,RED,BG_BLACK,(double)100*t/w);
			printf("%c[%d;%d;%dmFPS=%3.2f ", 0x1B, BRIGHT,RED,BG_BLACK,(double)fps);
			printf("%c[%dm\n", 0x1B, 0);
			Sample= true;

		}
        TotalFrames++;
        
    }
    g_scriptNode.Release();
    g_DepthGenerator.Release();
    g_UserGenerator.Release();
    g_Context.Release();
    xnFPSFree(&xnFPS);  // release the FPS calculator initialized above

    return 0;
}
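
All of the examples follow the same four-step pattern around the FPS helper: xnFPSInit() once, xnFPSMarkFrame() for every processed frame, xnFPSCalc() to read the moving average, xnFPSFree() at shutdown. Distilled into a standalone sketch (the function name, the 30-frame window, and the dummy frame loop are illustrative assumptions, not code from any project above):

#include <stdio.h>
#include <XnFPSCalculator.h>

// Minimal xnFPS lifecycle: init once, mark every processed frame, read the
// moving average periodically, free at shutdown.
void fpsLifecycleSketch()
{
	XnFPSData fps;
	if (xnFPSInit(&fps, 30) != XN_STATUS_OK)	// average over the last 30 frames
		return;

	for (int frame = 0; frame < 300; ++frame)
	{
		// ... wait for and process one frame here ...
		xnFPSMarkFrame(&fps);					// timestamp this frame

		if (frame % 30 == 0)
			printf("FPS=%3.2f\n", xnFPSCalc(&fps));
	}

	xnFPSFree(&fps);							// release the calculator
}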
Example #6
int ofxKinect::openKinect(){

#ifdef OPENNI
    kinectContext = new Context;

    rc = XN_STATUS_OK;

    rc = kinectContext->InitFromXmlFile(xml_path.c_str(), &errors);

	if (rc == XN_STATUS_NO_NODE_PRESENT)
	{
		XnChar strError[1024];
		errors.ToString(strError, 1024);
		printf("%s\n", strError);
		return (rc);
	}
	else if (rc != XN_STATUS_OK)
	{
		printf("Open failed: %s\n", xnGetStatusString(rc));
		return (rc);
	}


	rc = kinectContext->FindExistingNode(XN_NODE_TYPE_DEPTH, depth);
	CHECK_RC(rc, "Find depth generator");

    if (bImage){
        rc = kinectContext->FindExistingNode(XN_NODE_TYPE_IMAGE, image);
        CHECK_RC(rc, "Find image generator");


        XnBool isSupported = depth.IsCapabilitySupported("AlternativeViewPoint");
        if(isSupported){
          XnStatus res = depth.GetAlternativeViewPointCap().SetViewPoint(image);
          if(XN_STATUS_OK != res){
            printf("Getting and setting AlternativeViewPoint failed: %s\n", xnGetStatusString(res));
          }

        }
        rc = xnFPSInit(&xnFPS, 30);
        CHECK_RC(rc, "FPS Init");

        rc = kinectContext->StartGeneratingAll();
        CHECK_RC(rc, "StartGenerating");

    }else{
        rc = kinectContext->FindExistingNode(XN_NODE_TYPE_USER, userGenerator);
        CHECK_RC(rc, "Find user generator");



        if (!userGenerator.IsCapabilitySupported(XN_CAPABILITY_SKELETON) ||
            !userGenerator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION))
        {
            printf("User generator doesn't support either skeleton or pose detection.\n");
            return XN_STATUS_ERROR;
        }


        rc = xnFPSInit(&xnFPS, 30);
        CHECK_RC(rc, "FPS Init");


        userGenerator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);

        rc = kinectContext->StartGeneratingAll();
        CHECK_RC(rc, "StartGenerating");

        XnCallbackHandle hUserCBs, hCalibrationStartCB, hCalibrationCompleteCB, hPoseCBs;

        rc = userGenerator.RegisterUserCallbacks(NewUser, LostUser, this, hUserCBs);

        rc = userGenerator.GetSkeletonCap().RegisterToCalibrationStart(CalibrationStarted, this, hCalibrationStartCB);
        CHECK_RC(rc, "Register to calbiration start");
        rc = userGenerator.GetSkeletonCap().RegisterToCalibrationComplete(CalibrationCompleted, this, hCalibrationCompleteCB);
        CHECK_RC(rc, "Register to calibration complete");
        rc = userGenerator.GetPoseDetectionCap().RegisterToPoseDetected(PoseDetected, this, hPoseCBs);
        CHECK_RC(rc, "Register to pose detected");

    }


	depth.GetMetaData(depthMD);


	if (bImage)
        image.GetMetaData(imageMD);

#endif

	return 0;	// success
}
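
openKinect() only initializes the FPS calculator; frames still have to be marked somewhere in the wrapper's per-frame update path. A hypothetical hook is sketched below (the method name and the currentFPS member are assumptions for illustration, not part of ofxKinect's actual API):

// Hypothetical per-frame hook: mark each new frame on the calculator that was
// initialized in openKinect() above and cache the current average.
void ofxKinect::markFrame(){
#ifdef OPENNI
	kinectContext->WaitAnyUpdateAll();	// block until any node has new data
	xnFPSMarkFrame(&xnFPS);				// count this frame
	currentFPS = xnFPSCalc(&xnFPS);		// hypothetical member holding the average
#endif
}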