Example No. 1
// EXPORT FUNCTION:Init()
__declspec(dllexport) bool __stdcall OpenNIInit( HWND hWnd, bool EngFlag, LPDIRECT3DDEVICE9 lpDevice, WCHAR* f_path, CHAR* onifilename )
{
	TrackingF=false;
	for( int i = 0; i < 15; ++ i )
		TrCount[i] = 0;

	SetCurrentDirectoryW( f_path );

	if( nite::NiTE::initialize() == nite::STATUS_OK )
	{
		if( g_UserTracker.create() == nite::STATUS_OK )
		{
			nite::UserTrackerFrameRef mUserFrame;
			if( g_UserTracker.readFrame( &mUserFrame ) == nite::STATUS_OK )
			{
				openni::VideoFrameRef mDepthMap = mUserFrame.getDepthFrame();
				int x = mDepthMap.getWidth(),
					y = mDepthMap.getHeight();
				
				// Quarter-resolution depth map, padded up to a power of two for the D3D9 texture
				texWidth =  getClosestPowerOfTwo( x / 4 );
				texHeight = getClosestPowerOfTwo( y / 4 );
				
				if( FAILED( lpDevice->CreateTexture( texWidth, texHeight, 1, 0, D3DFMT_A8R8G8B8, D3DPOOL_MANAGED, &DepthTex, NULL ) ) )
				{
					MessageBox( hWnd, L"Cannot create depth texture", L"NiTE2", MB_OK );
					OpenNIClean();
					return false;
				}

				return true;
			}
			else
			{
				printError( hWnd, "UserTracker.readFrame" );
				MessageBox( hWnd, L"Cannot read user tracker frame", L"NiTE2", MB_OK );
				OpenNIClean();
				return false;
			}
		}
		else
		{
			printError( hWnd, "UserTracker.create" );
			MessageBox( hWnd, L"Cannot create user tracker", L"NiTE2", MB_OK );
			OpenNIClean();
			return false;
		}
	}
	else
	{
		printError( hWnd, "Init" );
		MessageBox( hWnd, L"Cannot initialize NiTE", L"NiTE2", MB_OK );
		return false;
	}
}
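
Note: Example No. 1 relies on a getClosestPowerOfTwo() helper that is not shown. A minimal sketch of how such a helper is typically written (an assumption, not the author's code) is:

// Assumed implementation of the getClosestPowerOfTwo() helper used above:
// rounds n up to the next power of two (D3D9-friendly texture sizes).
unsigned int getClosestPowerOfTwo( unsigned int n )
{
	unsigned int m = 2;
	while( m < n )
		m <<= 1;
	return m;
}
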
Example No. 2
void UserTracker::onNewFrame(nite::UserTracker &tracker)
{
	nite::Status rc = tracker.readFrame(&userTrackerFrame);
	
	if (rc != nite::STATUS_OK)
	{
		check_error(rc);
		return;
	}
	
	user_map = userTrackerFrame.getUserMap();
	
	// Queue this frame's users for the consumer/update thread
	mutex->lock();
	{
		const nite::Array<nite::UserData>& users = userTrackerFrame.getUsers();
		for (int i = 0; i < users.getSize(); i++)
			users_data.push_back(users[i]);
	}
	mutex->unlock();
	
	{
		openni::VideoFrameRef frame = userTrackerFrame.getDepthFrame();
		
		const unsigned short *pixels = (const unsigned short*)frame.getData();
		int w = frame.getVideoMode().getResolutionX();
		int h = frame.getVideoMode().getResolutionY();
		int num_pixels = w * h;
		
		pix.allocate(w, h, 1);
		pix.getBackBuffer().setFromPixels(pixels, w, h, OF_IMAGE_GRAYSCALE);
		pix.swap();
	}
}
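
Note: Example No. 2 only fills users_data under the lock; a matching consumer on the update/render thread would drain it under the same mutex. A minimal sketch, assuming users_data is a std::vector<nite::UserData> and update() is a hypothetical member of the same class:

// Sketch of a consumer that drains the users queued by onNewFrame() (hypothetical member).
void UserTracker::update()
{
	std::vector<nite::UserData> users_copy;

	mutex->lock();
	{
		users_copy.swap(users_data);	// take this frame's users, leave the shared vector empty
	}
	mutex->unlock();

	for (const nite::UserData& user : users_copy)
	{
		// ... react to user.isNew(), user.isLost(), user.getCenterOfMass(), etc.
	}
}
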
Example No. 3
// EXPORT FUNCTION:Clean()
__declspec(dllexport) void __stdcall OpenNIClean()
{
	if( DepthTex )
	{
		DepthTex->Release();
		DepthTex = NULL;
	}
	g_UserTracker.destroy();
	nite::NiTE::shutdown();
	TrackingF = false;
}
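
Note: Examples No. 1 and No. 3 are __stdcall functions exported from a DLL. A host application could bind them roughly as in the sketch below (the DLL name and the caller's variables are illustrative, and it assumes the exports are undecorated, e.g. via a .def file):

// Hypothetical host-side binding of the exported Init/Clean functions.
typedef bool (__stdcall *OpenNIInitFn)( HWND, bool, LPDIRECT3DDEVICE9, WCHAR*, CHAR* );
typedef void (__stdcall *OpenNICleanFn)();

HMODULE hNiteDll = LoadLibraryW( L"NiteWrapper.dll" );		// illustrative name
OpenNIInitFn  pOpenNIInit  = (OpenNIInitFn)GetProcAddress( hNiteDll, "OpenNIInit" );
OpenNICleanFn pOpenNIClean = (OpenNICleanFn)GetProcAddress( hNiteDll, "OpenNIClean" );

if( pOpenNIInit && pOpenNIInit( hWnd, true, lpDevice, dataPath, oniFile ) )
{
	// ... render loop, calling the other exports ...
	pOpenNIClean();
}
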
Example No. 4
void CGraph::JointFun(const nite::UserTracker& usertracker, const nite::UserData& data, const int& mapx, const int& mapy)
{
	static const unsigned int total = JOINTDATA_SIZE - 2;
	float cood[3 * total];
	memset(cood, 0, sizeof(float) * 3 * total);
	for (int i = 0; i < total; i++)
	{
		//if ((i == SHOULDER_ASIDE || i == HEAD_ASIDE)) continue;
		x[i] = data.getSkeleton().getJoint(jType[i]).getPosition().x;
		y[i] = data.getSkeleton().getJoint(jType[i]).getPosition().y;
		z[i] = data.getSkeleton().getJoint(jType[i]).getPosition().z;
	}

	// Treat all-zero positions on the first three joints as "not tracked"
	if (x[0] == 0 || y[0] == 0 || x[1] == 0 || y[1] == 0 || x[2] == 0 || y[2] == 0)
	{
		istracked = 0;
		m_angle = 0;
		return;
	}
	istracked = 1;
	/*if (abs(x[LEFT_SHOULDER] - x[RIGHT_SHOULDER]) <= 10)
	{
	m_playmode = PLAYMODE_DEPTH_SIDE;
	}
	else
	{
	m_playmode = PLAYMODE_DEPTH;
	}*/
	for (int i = 0; i < total; i++)
	{
		// Project each 3D joint onto the depth map, then rescale to the window size
		usertracker.convertJointCoordinatesToDepth(x[i], y[i], z[i], cood + 3 * i, cood + 3 * i + 1);
		cood[3 * i + 0] *= m_width * 1.0 / mapx;
		cood[3 * i + 1] *= m_height * 1.0 / mapy;
		cood[3 * i + 2] = 0;	// only the 2D projection is used for drawing
	}
	glColor3f(1, 0.5, 0);
	glEnable(GL_CULL_FACE);
	glPolygonMode(GL_FRONT, GL_FILL);
	glCullFace(GL_BACK);
	for (int i = 1; i < total; i++)
	{
		glPushMatrix();
		glLoadIdentity();
		glTranslatef(cood[i * 3], cood[i * 3 + 1], cood[i * 3 + 2]);
		glutSolidSphere(10, 100, 100);
		glPopMatrix();
	}
	glDisable(GL_CULL_FACE);

}
Example No. 5
void drawSkeleton( nite::UserTrackerFrameRef& userFrame,
                   nite::UserTracker& userTracker,
                   cv::Mat& image)
{
    const nite::Array<nite::UserData>& users = userFrame.getUsers();
    for ( int i = 0; i < users.getSize(); ++i ) {
        const nite::UserData& user = users[i];
        if ( user.isNew() ) {
            userTracker.startSkeletonTracking( user.getId() );
            userTracker.startPoseDetection( user.getId(), nite::POSE_PSI);
            userTracker.startPoseDetection( user.getId(), nite::POSE_CROSSED_HANDS);
        }
        else if ( !user.isLost() ) {
            // Draw the skeleton joints
            const auto skeleton = user.getSkeleton();
            if ( skeleton.getState() == nite::SkeletonState::SKELETON_TRACKED ) {
                for ( int j = 0; j < 15; j++ ) {
                    const auto joint = skeleton.getJoint((nite::JointType)j);
                    if ( joint.getPositionConfidence() >= 0.7f ) {
                        const auto position = joint.getPosition();
                        float x = 0, y = 0;
                        userTracker.convertJointCoordinatesToDepth(
                            position.x, position.y, position.z, &x, &y );

                        cv::circle( image, cv::Point( (int)x, (int)y ),
                            3, cv::Scalar( 0, 0, 255 ), 5 );
                    }
                }
            }
            // Draw detected poses
            const auto pose_psi = user.getPose(nite::POSE_PSI);
            if( pose_psi.isHeld() || pose_psi.isEntered() )
            {
                auto center = user.getCenterOfMass();
                float x = 0, y = 0;
                userTracker.convertJointCoordinatesToDepth(center.x, center.y, center.z, &x, &y);
                cv::putText(image, "PSI", cv::Point2f(x,y), cv::FONT_HERSHEY_SIMPLEX, 2, cv::Scalar(0xFF,0xFF,0xFF));
            }
            const auto pose_cross = user.getPose(nite::POSE_CROSSED_HANDS);
            if( pose_cross.isHeld() || pose_cross.isEntered() ){
                auto center = user.getCenterOfMass();
                float x = 0, y = 0;
                userTracker.convertJointCoordinatesToDepth(center.x, center.y, center.z, &x, &y);
                cv::putText(image, "Cross", cv::Point2f(x,y), cv::FONT_HERSHEY_COMPLEX, 2, cv::Scalar(0xFF,0xFF,0xFF));
            }
        }
    }
}
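
Note: A minimal capture loop that could drive drawSkeleton() might look like the sketch below. The depth-to-image conversion (16-bit depth scaled into an 8-bit BGR cv::Mat) is an assumption for display purposes, not part of the example:

#include <OpenNI.h>
#include <NiTE.h>
#include <opencv2/opencv.hpp>

int main()
{
    openni::OpenNI::initialize();
    nite::NiTE::initialize();

    nite::UserTracker userTracker;
    userTracker.create();

    while ( cv::waitKey( 1 ) != 'q' ) {
        nite::UserTrackerFrameRef userFrame;
        if ( userTracker.readFrame( &userFrame ) != nite::STATUS_OK )
            continue;

        // Build a displayable image from the raw 16-bit depth frame
        openni::VideoFrameRef depthFrame = userFrame.getDepthFrame();
        cv::Mat depth16( depthFrame.getHeight(), depthFrame.getWidth(),
                         CV_16UC1, (void*)depthFrame.getData() );
        cv::Mat gray, image;
        depth16.convertTo( gray, CV_8UC1, 255.0 / 10000 );    // assume ~10 m max range
        cv::cvtColor( gray, image, cv::COLOR_GRAY2BGR );

        drawSkeleton( userFrame, userTracker, image );
        cv::imshow( "skeleton", image );
    }

    nite::NiTE::shutdown();
    openni::OpenNI::shutdown();
    return 0;
}
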
Example No. 6
void PlayerTracker::onNewFrame(nite::UserTracker& userTracker)
{
  {
    const juce::ScopedLock sL(trackerAccess);
    niteRc = userTracker.readFrame(&userTrackerFrame);
  }
  if (niteRc != nite::STATUS_OK)
  {
    return;
  }

  const nite::Array<nite::UserData>& users = userTrackerFrame.getUsers();
  for (int i = 0; i < users.getSize(); ++i) {

    const nite::UserData& user = users[i];
    updateUserState(user,userTrackerFrame.getTimestamp());
    if (user.isNew())
    {
      userTracker.startSkeletonTracking(user.getId());
      userTracker.setSkeletonSmoothingFactor(.85f);
    }
    else if (user.getSkeleton().getState() == nite::SKELETON_TRACKED)
    {    
      //If hands positions have not enough confidence, this frame is skipped
      if (! handsTracker.update(user.getSkeleton()))
        return;

      if (transportGesture.checkTransportGesture(handsTracker, user.getSkeleton()))
      {
        if (transportGesture.getTransportStatus() == TransportGesture::PLAY)
        {
          activateMusicalGestureDetection();
          sequencer->play();
        }
        else if (transportGesture.getTransportStatus() == TransportGesture::PAUSE)
        {
          deactivateMusicalGestureDetection();
          sequencer->pause();
        }
        else if (transportGesture.getTransportStatus() == TransportGesture::STOP)
        {
          deactivateMusicalGestureDetection();
          sequencer->stop();
        }
      }

      //Detect expression changes
      if (musicalGestureDetectionActivated && expressionGesture.checkExpressionGesture(handsTracker, user.getSkeleton()))
      {
        sequencer->setExpression(expressionGesture.getExpressionDetected());
      }

      //Detect tempo changes
      if (musicalGestureDetectionActivated && tempoGesture.checkTempoGesture(handsTracker, user.getSkeleton()))
      {
        sequencer->setTempo(tempoGesture.getTempo());
      }

      //Send hands position to game (display hands as spheres)

      Sim::postEvent(Sim::getRootGroup(), new HandsMove(handsTracker.torqueCoordinatesLeftHand, handsTracker.torqueCoordinatesRightHand), -1);
     
    }
  } 
}
Example No. 7
// EXPORT FUNCTION:DrawDepthMap()
__declspec(dllexport) void __stdcall OpenNIDrawDepthMap( bool waitflag )
{
	nite::UserTrackerFrameRef mUserFrame;
	if( g_UserTracker.readFrame( &mUserFrame ) == nite::STATUS_OK )
	{
		const nite::UserMap& rUserMap = mUserFrame.getUserMap();
		const nite::UserId* pLabels = rUserMap.getPixels();

		openni::VideoFrameRef mDepthMap = mUserFrame.getDepthFrame();
		const openni::DepthPixel* pDepth = static_cast<const openni::DepthPixel*>( mDepthMap.getData() );
		
		int iXRes = mDepthMap.getWidth(),
			iYRes = mDepthMap.getHeight();
		
		D3DLOCKED_RECT LPdest;
		DepthTex->LockRect(0,&LPdest,NULL, 0);
		UCHAR *pDestImage=(UCHAR*)LPdest.pBits;
		
		// Calculate the cumulative depth histogram
		ZeroMemory( g_pDepthHist, MAX_DEPTH * sizeof(float) );
		UINT nValue=0;
		UINT nNumberOfPoints = 0;
		for( int nY = 0; nY < iYRes; ++ nY )
		{
			for( int nX = 0; nX < iXRes; ++ nX )
			{
				nValue = *pDepth;
				if(nValue !=0)
				{
					g_pDepthHist[nValue]++;
					nNumberOfPoints++;
				}
				pDepth++;
			}
		}
		
		for( int nIndex = 1; nIndex < MAX_DEPTH; nIndex++ )
		{
			g_pDepthHist[nIndex] += g_pDepthHist[nIndex-1];
		}
		
		if( nNumberOfPoints )
		{
			for( int nIndex = 1; nIndex < MAX_DEPTH; nIndex++ )
			{
				g_pDepthHist[nIndex] = (float)((UINT)(256 * (1.0f - (g_pDepthHist[nIndex] / nNumberOfPoints))));
			}
		}
		
		UINT nHistValue = 0;
		if( g_bDrawPixels )
		{
			// Rewind pDepth: the histogram pass above already walked it through the whole frame
			pDepth = static_cast<const openni::DepthPixel*>( mDepthMap.getData() );

			// Prepare the texture map (sampling every 4th pixel in x and y)
			for( int nY = 0; nY < iYRes; nY += 4 )
			{
				for( int nX=0; nX < iXRes; nX += 4 )
				{
					pDestImage[0] = 0;
					pDestImage[1] = 0;
					pDestImage[2] = 0;
					pDestImage[3] = 0;
					
					if( g_bDrawBackground )
					{
						nValue = *pDepth;
						nite::UserId label = *pLabels;

						int nColorID = label % NCOLORS;
						if( label == 0 )
							nColorID = NCOLORS;
						
						if(nValue != 0)
						{
							nHistValue = (UINT)(g_pDepthHist[nValue]);

							pDestImage[0] = (UINT)(nHistValue * Colors[nColorID][0]); 
							pDestImage[1] = (UINT)(nHistValue * Colors[nColorID][1]);
							pDestImage[2] = (UINT)(nHistValue * Colors[nColorID][2]);
							pDestImage[3] = 255;
						}
					}

					pDepth		+= 4;
					pLabels		+= 4;
					pDestImage	+= 4;
				}
				
				// Skip the 3 source rows we do not sample and advance the
				// destination pointer to the start of the next texture row
				int pg = iXRes * 3;
				pDepth += pg;
				pLabels += pg;
				pDestImage += (texWidth - iXRes)*4 + pg;
			}
		}
		else
		{
			memset( LPdest.pBits, 0, LPdest.Pitch * texHeight );
		}
		DepthTex->UnlockRect(0);

		const nite::Array<nite::UserData>& aUsers = mUserFrame.getUsers();
		for( int iIdx = 0; iIdx < aUsers.getSize(); ++ iIdx )
		{
			const nite::UserData& rUser = aUsers[iIdx];
			if( rUser.isNew() )
			{
				g_UserTracker.startPoseDetection( rUser.getId(), nite::POSE_PSI );
			}
			else
			{
				const nite::PoseData& rPose = rUser.getPose( nite::POSE_PSI );
				if( rPose.isEntered() )
				{
					g_UserTracker.stopPoseDetection( rUser.getId(), nite::POSE_PSI );
					g_UserTracker.startSkeletonTracking( rUser.getId() );
				}

				const nite::Skeleton& rSkeleton = rUser.getSkeleton();
				if( rSkeleton.getState() == nite::SKELETON_TRACKED )
				{
					// Require 4 consecutive tracked frames before latching the reference ("zero") pose
					if( TrCount[iIdx] < 4 )
					{
						TrCount[iIdx]++;
						if( TrCount[iIdx] == 4 )
						{
							TrackingF = true;
							const nite::Point3f& rPos = rSkeleton.getJoint( nite::JOINT_TORSO ).getPosition();
							g_BP_Zero.x = rPos.x;
							g_BP_Zero.z = rPos.z;
							g_BP_Zero.y = float( rSkeleton.getJoint( nite::JOINT_LEFT_HIP ).getPosition().y + rSkeleton.getJoint( nite::JOINT_RIGHT_HIP ).getPosition().y ) / 2;
						}
					}

					PosCalc( rSkeleton, nite::JOINT_TORSO,			&BP_Vector[0] );
					PosCalc( rSkeleton, nite::JOINT_NECK,			&BP_Vector[1]);
					PosCalc( rSkeleton, nite::JOINT_HEAD,			&BP_Vector[2]);
					PosCalc( rSkeleton, nite::JOINT_LEFT_SHOULDER,	&BP_Vector[3]);
					PosCalc( rSkeleton, nite::JOINT_LEFT_ELBOW,		&BP_Vector[4]);
					PosCalc( rSkeleton, nite::JOINT_RIGHT_SHOULDER,	&BP_Vector[6]);
					PosCalc( rSkeleton, nite::JOINT_RIGHT_ELBOW,	&BP_Vector[7]);
					PosCalc( rSkeleton, nite::JOINT_LEFT_HIP,		&BP_Vector[9]);
					PosCalc( rSkeleton, nite::JOINT_LEFT_KNEE,		&BP_Vector[10]);
					PosCalc( rSkeleton, nite::JOINT_LEFT_FOOT,		&BP_Vector[11]);
					PosCalc( rSkeleton, nite::JOINT_RIGHT_HIP,		&BP_Vector[12]);
					PosCalc( rSkeleton, nite::JOINT_RIGHT_KNEE,		&BP_Vector[13]);
					PosCalc( rSkeleton, nite::JOINT_RIGHT_FOOT,		&BP_Vector[14]);
					PosCalc( rSkeleton, nite::JOINT_TORSO,			&BP_Vector[15]);
					PosCalc( rSkeleton, nite::JOINT_LEFT_HAND,		&BP_Vector[16]);
					PosCalc( rSkeleton, nite::JOINT_RIGHT_HAND,		&BP_Vector[17]);
					//PosCalc( rSkeleton, nite::XN_SKEL_LEFT_WRIST,	&BP_Vector[5]);
					//PosCalc( rSkeleton, nite::XN_SKEL_RIGHT_WRIST,	&BP_Vector[8]);

					BP_Vector[5] = BP_Vector[16];
					BP_Vector[8] = BP_Vector[17];

					BP_Vector[0].y = ( BP_Vector[9].y + BP_Vector[12].y ) / 2.0f;
					break;
				}
				else
				{
					TrCount[iIdx]=0;
				}
			}
		}
	}
}
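
Note: The depth-to-brightness mapping used in Example No. 7 is a cumulative histogram: nearer pixels (small depth values) come out brighter. Condensed into a standalone sketch (names and the MAX_DEPTH value are illustrative):

#include <cstdint>
#include <vector>

static const int MAX_DEPTH = 10000;	// illustrative upper bound on raw depth values

// Fills hist so that hist[d] is a brightness value for raw depth d (nearer => brighter).
void buildDepthHistogram( const uint16_t* depth, int numPixels, std::vector<float>& hist )
{
	hist.assign( MAX_DEPTH, 0.0f );

	unsigned int numPoints = 0;
	for( int i = 0; i < numPixels; ++i )
	{
		if( depth[i] != 0 && depth[i] < MAX_DEPTH )	// 0 means "no reading"
		{
			hist[depth[i]]++;
			numPoints++;
		}
	}

	for( int d = 1; d < MAX_DEPTH; ++d )		// accumulate
		hist[d] += hist[d - 1];

	if( numPoints )
		for( int d = 1; d < MAX_DEPTH; ++d )	// invert and scale toward a 0..255 brightness
			hist[d] = 256.0f * ( 1.0f - hist[d] / numPoints );
}
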