/// <summary>
/// Handle new skeleton data
/// </summary>
void CSkeletonBasics::ProcessSkeleton()
{
    NUI_SKELETON_FRAME skeletonFrame = {0};

    HRESULT hr = m_pNuiSensor->NuiSkeletonGetNextFrame(0, &skeletonFrame);
    if ( FAILED(hr) )
    {
        return;
    }

    // smooth out the skeleton data
    m_pNuiSensor->NuiTransformSmooth(&skeletonFrame, NULL);

    // Ensure Direct2D is ready to draw
    hr = EnsureDirect2DResources( );
    if ( FAILED(hr) )
    {
        return;
    }

    m_pRenderTarget->BeginDraw();
    m_pRenderTarget->Clear( );
    
    RECT rct;
    GetClientRect( GetDlgItem( m_hWnd, IDC_VIDEOVIEW ), &rct);
    int width = rct.right;
    int height = rct.bottom;

    for (int i = 0 ; i < NUI_SKELETON_COUNT; ++i)
    {
        NUI_SKELETON_TRACKING_STATE trackingState = skeletonFrame.SkeletonData[i].eTrackingState;

        if (NUI_SKELETON_TRACKED == trackingState)
        {
            // We're tracking the skeleton, draw it
            DrawSkeleton(skeletonFrame.SkeletonData[i], width, height);
        }
        else if (NUI_SKELETON_POSITION_ONLY == trackingState)
        {
            // we've only received the center point of the skeleton, draw that
            D2D1_ELLIPSE ellipse = D2D1::Ellipse(
                SkeletonToScreen(skeletonFrame.SkeletonData[i].Position, width, height),
                g_JointThickness,
                g_JointThickness
                );

            m_pRenderTarget->DrawEllipse(ellipse, m_pBrushJointTracked);
        }
    }

    hr = m_pRenderTarget->EndDraw();

    // Device lost, need to recreate the render target
    // We'll dispose it now and retry drawing
    if (D2DERR_RECREATE_TARGET == hr)
    {
        hr = S_OK;
        DiscardDirect2DResources();
    }
}
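The example above relies on a SkeletonToScreen helper that is not shown in this listing. A minimal sketch, following the stock SkeletonBasics-D2D sample, is given below; cScreenWidth and cScreenHeight (320 and 240, the depth resolution NuiTransformSkeletonToDepthImage maps into) are assumed constants and not taken from the code above.

D2D1_POINT_2F CSkeletonBasics::SkeletonToScreen(Vector4 skeletonPoint, int width, int height)
{
    LONG x, y;
    USHORT depth;

    // Map the skeleton-space point into 320x240 depth-image coordinates
    NuiTransformSkeletonToDepthImage(skeletonPoint, &x, &y, &depth);

    // Scale the depth-image coordinates to the size of the drawing area
    float screenPointX = static_cast<float>(x * width) / cScreenWidth;
    float screenPointY = static_cast<float>(y * height) / cScreenHeight;

    return D2D1::Point2F(screenPointX, screenPointY);
}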
Example 2
/// <summary>
/// Handle new body data
/// <param name="nTime">timestamp of frame</param>
/// <param name="nBodyCount">body data count</param>
/// <param name="ppBodies">body data in frame</param>
/// </summary>
void CBodyBasics::ProcessBody(INT64 nTime, int nBodyCount, IBody** ppBodies)
{
    if (m_hWnd)
    {
        HRESULT hr = EnsureDirect2DResources();

        DetectionResult nEngaged[6] = { DetectionResult_Unknown };

        int width = 0;
        int height = 0;
        if (SUCCEEDED(hr) && m_pRenderTarget && m_pCoordinateMapper)
        {
            m_pRenderTarget->BeginDraw();
            m_pRenderTarget->Clear();

            RECT rct;
            GetClientRect(GetDlgItem(m_hWnd, IDC_VIDEOVIEW), &rct);
            width = rct.right;
            height = rct.bottom;

            for (int i = 0; i < nBodyCount; ++i)
            {
                nEngaged[i] = DetectionResult_Maybe;

                IBody* pBody = ppBodies[i];
                if (pBody)
                {
                    BOOLEAN bTracked = false;
                    hr = pBody->get_IsTracked(&bTracked);

                    // get_Engaged() appears to be usable; it presumably detects a person who has entered the field of view.
                    hr = pBody->get_Engaged( &nEngaged[i] );
                    // The call below does not seem to be usable yet
                    //hr = pBody->GetAppearanceDetectionResults((UINT)i, &nEngaged[i]);

                    if (SUCCEEDED(hr) && bTracked)
                    {
                        Joint joints[JointType_Count];
                        D2D1_POINT_2F jointPoints[JointType_Count];
                        HandState leftHandState = HandState_Unknown;
                        HandState rightHandState = HandState_Unknown;

                        pBody->get_HandLeftState(&leftHandState);
                        pBody->get_HandRightState(&rightHandState);

                        hr = pBody->GetJoints(_countof(joints), joints);
                        if (SUCCEEDED(hr))
                        {
                            for (int j = 0; j < _countof(joints); ++j)
                            {
                                jointPoints[j] = BodyToScreen(joints[j].Position, width, height);
                            }

                            DrawBody(joints, jointPoints);

                            // Draw a circle at the head here and show the body index
                            DrawHead(jointPoints[JointType_Head], i, nEngaged[i]);

                            DrawHand(leftHandState, jointPoints[JointType_HandLeft]);
                            DrawHand(rightHandState, jointPoints[JointType_HandRight]);
                        }
                    }
                }
            }

            hr = m_pRenderTarget->EndDraw();

            // Device lost, need to recreate the render target
            // We'll dispose it now and retry drawing
            if (D2DERR_RECREATE_TARGET == hr)
            {
                hr = S_OK;
                DiscardDirect2DResources();
            }
        }

        if (!m_nStartTime)
        {
            m_nStartTime = nTime;
        }

        double fps = 0.0;

        LARGE_INTEGER qpcNow = {0};
        if (m_fFreq)
        {
            if (QueryPerformanceCounter(&qpcNow))
            {
                if (m_nLastCounter)
                {
                    m_nFramesSinceUpdate++;
                    fps = m_fFreq * m_nFramesSinceUpdate / double(qpcNow.QuadPart - m_nLastCounter);
                }
            }
        }

        WCHAR szStatusMessage[128];
        StringCchPrintf( szStatusMessage, _countof(szStatusMessage),
                         L" FPS = %0.2f  Time = %I64d width:%d height:%d",
                         fps, (nTime - m_nStartTime), width, height);

        if (SetStatusMessage(szStatusMessage, 1000, false))
        {
            m_nLastCounter = qpcNow.QuadPart;
            m_nFramesSinceUpdate = 0;
        }
    }
}
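CBodyBasics::ProcessBody above calls a BodyToScreen helper that is not part of this listing. A minimal sketch, following the stock BodyBasics-D2D sample, is shown below; cDepthWidth and cDepthHeight (512 and 424, the Kinect v2 depth resolution) are assumed constants.

D2D1_POINT_2F CBodyBasics::BodyToScreen(const CameraSpacePoint& bodyPoint, int width, int height)
{
    // Map the camera-space joint into depth-image coordinates
    DepthSpacePoint depthPoint = {0};
    m_pCoordinateMapper->MapCameraPointToDepthSpace(bodyPoint, &depthPoint);

    // Scale the depth-image coordinates to the size of the target window
    float screenPointX = static_cast<float>(depthPoint.X * width) / cDepthWidth;
    float screenPointY = static_cast<float>(depthPoint.Y * height) / cDepthHeight;

    return D2D1::Point2F(screenPointX, screenPointY);
}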
	void BreathingClass::ProcessBody(INT64 nTime, int nBodyCount, IBody** ppBodies)
	{
		double testVal = 0;

		if (m_hWnd)
		{
			HRESULT hr = EnsureDirect2DResources();

			if (SUCCEEDED(hr) && m_pRenderTarget && m_pCoordinateMapper)
			{
				m_pRenderTarget->BeginDraw();
				m_pRenderTarget->Clear();

				RECT rct;
				GetClientRect(GetDlgItem(m_hWnd, IDC_VIDEOVIEW), &rct);
				int width = rct.right;
				int height = rct.bottom;

				for (int i = 0; i < nBodyCount; ++i)
				{
					IBody* pBody = ppBodies[i];
					if (pBody)
					{
						BOOLEAN bTracked = false;
						hr = pBody->get_IsTracked(&bTracked);

						if (SUCCEEDED(hr) && bTracked)
						{
							Joint joints[JointType_Count];
							D2D1_POINT_2F jointPoints[JointType_Count];
							HandState leftHandState = HandState_Unknown;
							HandState rightHandState = HandState_Unknown;

							pBody->get_HandLeftState(&leftHandState);
							pBody->get_HandRightState(&rightHandState);

							hr = pBody->GetJoints(_countof(joints), joints);
							if (SUCCEEDED(hr))
							{
								for (int j = 0; j < _countof(joints); ++j)
								{
									jointPoints[j] = BodyToScreen(joints[j].Position, width, height);
								}

								//DrawHand(leftHandState, jointPoints[JointType_ShoulderLeft]);

								float shoulderLeft = jointPoints[JointType_ShoulderLeft].y;
								float shoulderRight = jointPoints[JointType_ShoulderRight].y;
								float shoulderSpine = jointPoints[JointType_SpineShoulder].y;

								// average left/right shoulder screen height, scaled by 100
								// (shoulderSpine is captured but not used here)
								testVal = (shoulderLeft + shoulderRight) / 2.0f * 100;

								BreathUpdate2(testVal);
							}
						}
					}
				}

				hr = m_pRenderTarget->EndDraw();

				// Device lost, need to recreate the render target
				// We'll dispose it now and retry drawing
				if (D2DERR_RECREATE_TARGET == hr)
				{
					hr = S_OK;
					DiscardDirect2DResources();
				}
			}

			if (!m_nStartTime)
			{
				m_nStartTime = nTime;
			}


			// repurpose the fps readout as a breathing flag: 1 while breathing in, 0 otherwise
			double fps = 0.0;

			if (GetBreathingIn())
				fps = 1.0;
			else
				fps = 0.0;

			LARGE_INTEGER qpcNow = { 0 };
			if (m_fFreq)
			{
				if (QueryPerformanceCounter(&qpcNow))
				{
					if (m_nLastCounter)
					{
						m_nFramesSinceUpdate++;
						// fps = m_fFreq * m_nFramesSinceUpdate / double(qpcNow.QuadPart - m_nLastCounter);
					}
				}
			}

			// Status output is disabled in this class; the buffer and format call are kept for reference.
			WCHAR szStatusMessage[64];
			//StringCchPrintf(szStatusMessage, _countof(szStatusMessage), L" FPS = %0.2f    Time = %I64d", fps, (nTime - m_nStartTime));
		}
	}
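BreathUpdate2 and GetBreathingIn are not shown in the BreathingClass example above. A purely hypothetical sketch of one way to implement them: low-pass filter the shoulder-height signal and treat values above the running baseline (smaller screen y) as breathing in. The member variables m_breathBaseline and m_breathingIn are assumptions for illustration.

	void BreathingClass::BreathUpdate2(double shoulderHeight)
	{
		const double alpha = 0.05;              // smoothing factor for the running baseline

		if (m_breathBaseline == 0.0)
		{
			m_breathBaseline = shoulderHeight;  // first sample initializes the baseline
		}

		// shoulders above the baseline (smaller screen y) => chest raised => breathing in
		m_breathingIn = (shoulderHeight < m_breathBaseline);

		// let the baseline drift slowly so posture changes do not stick permanently
		m_breathBaseline = (1.0 - alpha) * m_breathBaseline + alpha * shoulderHeight;
	}

	bool BreathingClass::GetBreathingIn()
	{
		return m_breathingIn;
	}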
/// <summary>
/// Handle new body data
/// <param name="nTime">timestamp of frame</param>
/// <param name="nBodyCount">body data count</param>
/// <param name="ppBodies">body data in frame</param>
/// </summary>
void CBodyBasics::ProcessBody(INT64 nTime, int nBodyCount, IBody** ppBodies)
{
   
	//Declare a struct to collect this frame's data; zero-initialize it so untracked slots stay well defined
	struct FRAME_RESUME actual_frame = {};
	
	if (m_hWnd)
    {
        HRESULT hr = EnsureDirect2DResources();

        if (SUCCEEDED(hr) && m_pRenderTarget && m_pCoordinateMapper)
        {
            m_pRenderTarget->BeginDraw();
            m_pRenderTarget->Clear();

            RECT rct;
            GetClientRect(GetDlgItem(m_hWnd, IDC_VIDEOVIEW), &rct);
            int width = rct.right;
            int height = rct.bottom;

            for (int i = 0; i < nBodyCount; ++i)
            {
                IBody* pBody = ppBodies[i];
                if (pBody)
                {
                    BOOLEAN bTracked = false;
                    hr = pBody->get_IsTracked(&bTracked);

					//Record whether this person is tracked: true (1) or false (0)
					actual_frame.person[i].isTracked = bTracked;

					//If the person is tracked
                    if (SUCCEEDED(hr) && bTracked)
                    {
                        Joint joints[JointType_Count]; 
                        D2D1_POINT_2F jointPoints[JointType_Count];
                        HandState leftHandState = HandState_Unknown;
                        HandState rightHandState = HandState_Unknown;

                        pBody->get_HandLeftState(&leftHandState);
                        pBody->get_HandRightState(&rightHandState);

                        hr = pBody->GetJoints(_countof(joints), joints);
                        if (SUCCEEDED(hr))
                        {
                            
							for (int j = 0; j < _countof(joints); ++j)
                            {
                                jointPoints[j] = BodyToScreen(joints[j].Position, width, height);	

								//If current joint is not tracked
								if (joints[j].TrackingState == TrackingState_NotTracked)
								{
									actual_frame.person[i].body.joint[j].isTracked = 0;
									actual_frame.person[i].body.joint[j].coordinate3D.X = 0;
									actual_frame.person[i].body.joint[j].coordinate3D.Y = 0;
									actual_frame.person[i].body.joint[j].coordinate3D.Z = 0;
									actual_frame.person[i].body.joint[j].coordinate2D.X = 0;
									actual_frame.person[i].body.joint[j].coordinate2D.Y = 0;
								}

								else
								{
									if (joints[j].TrackingState == TrackingState_Tracked)
									{
										actual_frame.person[i].body.joint[j].isTracked = 1;
									}
									else
									{
										if (joints[j].TrackingState == TrackingState_Inferred)
										{
											actual_frame.person[i].body.joint[j].isTracked = 2;
										}
										else
										{
											actual_frame.person[i].body.joint[j].isTracked = 3;
										}
									}

									actual_frame.person[i].body.joint[j].coordinate3D.X = joints[j].Position.X;
									actual_frame.person[i].body.joint[j].coordinate3D.Y = joints[j].Position.Y;
									actual_frame.person[i].body.joint[j].coordinate3D.Z = joints[j].Position.Z;
									actual_frame.person[i].body.joint[j].coordinate2D.X = jointPoints[j].x;
									actual_frame.person[i].body.joint[j].coordinate2D.Y = jointPoints[j].y;
								}

                            }

                            DrawBody(joints, jointPoints);

                            DrawHand(leftHandState, jointPoints[JointType_HandLeft]);
                            DrawHand(rightHandState, jointPoints[JointType_HandRight]);
                        }											

						//Gesture values

						//Get body gesture
						//BodyGestureIdentification(actual_frame.person[i], &actual_frame.person[i].bodyGesture.type, &actual_frame.person[i].bodyGesture.parameter1);
						BodyPoseIdentification(actual_frame.person[i], &actual_frame.person[i].bodyPose.type);
						//Get hand gestures
						actual_frame.person[i].leftHandGesture.type = leftHandState;
						actual_frame.person[i].leftHandGesture.parameter1 = 0;
						actual_frame.person[i].rightHandGesture.type = rightHandState;
						actual_frame.person[i].rightHandGesture.parameter1 = 0;


                    }

					//If the person is not tracked, set all of its values to zero
					else
					{
						//All joints go to zero
						for (int j = 0; j < JointType_Count; j++)
						{
							actual_frame.person[i].body.joint[j].isTracked = 0;
							actual_frame.person[i].body.joint[j].coordinate3D.X = 0;
							actual_frame.person[i].body.joint[j].coordinate3D.Y = 0;
							actual_frame.person[i].body.joint[j].coordinate3D.Z = 0;
							actual_frame.person[i].body.joint[j].coordinate2D.X = 0;
							actual_frame.person[i].body.joint[j].coordinate2D.Y = 0;
						}

						//All gestures go to zero
						actual_frame.person[i].bodyPose.type = 0;
						actual_frame.person[i].bodyGesture.type = 0;
						actual_frame.person[i].bodyGesture.parameter1 = 0;
						actual_frame.person[i].leftHandGesture.type = 0;
						actual_frame.person[i].leftHandGesture.parameter1 = 0;
						actual_frame.person[i].rightHandGesture.type = 0;
						actual_frame.person[i].rightHandGesture.parameter1 = 0;

					}
                }
            }

            hr = m_pRenderTarget->EndDraw();

			// Device lost, need to recreate the render target
            // We'll dispose it now and retry drawing
            if (D2DERR_RECREATE_TARGET == hr)
            {
                hr = S_OK;
                DiscardDirect2DResources();
            }
        }

        if (!m_nStartTime)
        {
            m_nStartTime = nTime;
        }

        double fps = 0.0;

        LARGE_INTEGER qpcNow = {0};
        if (m_fFreq)
        {
            if (QueryPerformanceCounter(&qpcNow))
            {
                if (m_nLastCounter)
                {
                    m_nFramesSinceUpdate++;
                    fps = m_fFreq * m_nFramesSinceUpdate / double(qpcNow.QuadPart - m_nLastCounter);
                }
            }
        }

        WCHAR szStatusMessage[64];
        StringCchPrintf(szStatusMessage, _countof(szStatusMessage), L" FPS = %0.2f    Time = %I64d", fps, (nTime - m_nStartTime));

        if (SetStatusMessage(szStatusMessage, 1000, false))
        {
            m_nLastCounter = qpcNow.QuadPart;
            m_nFramesSinceUpdate = 0;
        }
    }

	//Collected results are sent to a file
	write_a_frame_resume_in_file(actual_frame);

}
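The nested if/else chain in the per-joint loop above maps a Kinect TrackingState onto the small integer code stored in isTracked (0 = not tracked, 1 = tracked, 2 = inferred, 3 = anything else). The same mapping written as a helper, purely as an illustrative refactor and not part of the original sample:

static int TrackingStateToCode(TrackingState state)
{
    switch (state)
    {
    case TrackingState_NotTracked: return 0;   // joint not seen at all
    case TrackingState_Tracked:    return 1;   // joint position measured directly
    case TrackingState_Inferred:   return 2;   // joint position estimated from neighboring joints
    default:                       return 3;   // any other / future state
    }
}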
/// <summary>
/// Handle new skeleton data
/// </summary>
void CSkeletonBasics::ProcessSkeleton()
{
    NUI_SKELETON_FRAME skeletonFrame = {0};

    HRESULT hr = m_pNuiSensor->NuiSkeletonGetNextFrame(0, &skeletonFrame);
    if ( FAILED(hr) )
    {
        return;
    }

    // smooth out the skeleton data
    m_pNuiSensor->NuiTransformSmooth(&skeletonFrame, NULL);

    // Ensure Direct2D is ready to draw
    hr = EnsureDirect2DResources( );
    if ( FAILED(hr) )
    {
        return;
    }

    m_pRenderTarget->BeginDraw();
    m_pRenderTarget->Clear( );

    RECT rct;
    GetClientRect( GetDlgItem( m_hWnd, IDC_VIDEOVIEW ), &rct);
    int width = rct.right;
    int height = rct.bottom;

    for (int i = 0 ; i < NUI_SKELETON_COUNT; ++i)
    {
        NUI_SKELETON_TRACKING_STATE trackingState = skeletonFrame.SkeletonData[i].eTrackingState;

        if (NUI_SKELETON_TRACKED == trackingState)
        {
            // We're tracking the skeleton, draw it
            DrawSkeleton(skeletonFrame.SkeletonData[i], width, height);
			
			// Send commands over the serial ports depending on the detected gesture
			if ((skeletonFrame.SkeletonData[i].SkeletonPositions[NUI_SKELETON_POSITION_HAND_RIGHT].y > skeletonFrame.SkeletonData[i].SkeletonPositions[NUI_SKELETON_POSITION_SHOULDER_RIGHT].y) &&
				(skeletonFrame.SkeletonData[i].SkeletonPositions[NUI_SKELETON_POSITION_HAND_LEFT].y < skeletonFrame.SkeletonData[i].SkeletonPositions[NUI_SKELETON_POSITION_SHOULDER_LEFT].y))
			{
				SetStatusMessage(L"right hand is up");
				// Write "D" in keyboard buffer
				//keybd_event(0x44, 0x20, 0, 0); // 1. virtual-key code  2. scan code

				unsigned char berndmessage[] = "d";
				port1.sendArray(berndmessage, 1);
				unsigned char heinrichmessage[] = "d";
				port2.sendArray2(heinrichmessage, 1);
			}

			if ((skeletonFrame.SkeletonData[i].SkeletonPositions[NUI_SKELETON_POSITION_HAND_LEFT].y > skeletonFrame.SkeletonData[i].SkeletonPositions[NUI_SKELETON_POSITION_SHOULDER_LEFT].y) &&
				(skeletonFrame.SkeletonData[i].SkeletonPositions[NUI_SKELETON_POSITION_HAND_RIGHT].y < skeletonFrame.SkeletonData[i].SkeletonPositions[NUI_SKELETON_POSITION_SHOULDER_RIGHT].y))
			{
				// Write "A" in keyboard buffer
				SetStatusMessage(L"left hand is up");
				// keybd_event(0x41, 0x1E, 0, 0);

				unsigned char berndmessage[] = "a";
				port1.sendArray(berndmessage, 1);
				unsigned char heinrichmessage[] = "a";
				port2.sendArray2(heinrichmessage, 1);
			}

			if ((skeletonFrame.SkeletonData[i].SkeletonPositions[NUI_SKELETON_POSITION_HAND_LEFT].y > skeletonFrame.SkeletonData[i].SkeletonPositions[NUI_SKELETON_POSITION_SHOULDER_LEFT].y) &&
				(skeletonFrame.SkeletonData[i].SkeletonPositions[NUI_SKELETON_POSITION_HAND_RIGHT].y > skeletonFrame.SkeletonData[i].SkeletonPositions[NUI_SKELETON_POSITION_SHOULDER_RIGHT].y))
			{
				// Write "W" in keyboard buffer
				SetStatusMessage(L"both hands are up");
				//putchar('w');
				//keybd_event( 0x77, 0, KEYEVENTF_EXTENDEDKEY | 0,0 ); // at least this outputs the keys
				//keybd_event(0x57, 0x11, 0, 0 ); // this one drives!

				unsigned char berndmessage[] = "w";
				port1.sendArray(berndmessage, 1);
				unsigned char heinrichmessage[] = "w";
				port2.sendArray2(heinrichmessage, 1);
			}

			if ((skeletonFrame.SkeletonData[i].SkeletonPositions[NUI_SKELETON_POSITION_HAND_LEFT].y < skeletonFrame.SkeletonData[i].SkeletonPositions[NUI_SKELETON_POSITION_KNEE_LEFT].y) &&
				(skeletonFrame.SkeletonData[i].SkeletonPositions[NUI_SKELETON_POSITION_HAND_RIGHT].y < skeletonFrame.SkeletonData[i].SkeletonPositions[NUI_SKELETON_POSITION_KNEE_RIGHT].y))
			{
				// Write "S" in keyboard buffer
				SetStatusMessage(L"both hands below the knees");
				//keybd_event(0x53, 0x1F, 0, 0);

				unsigned char berndmessage[] = "s";
				port1.sendArray(berndmessage, 1);
				unsigned char heinrichmessage[] = "s";
				port2.sendArray2(heinrichmessage, 1);
			}

			if ((skeletonFrame.SkeletonData[i].SkeletonPositions[NUI_SKELETON_POSITION_HAND_LEFT].y > skeletonFrame.SkeletonData[i].SkeletonPositions[NUI_SKELETON_POSITION_KNEE_LEFT].y) &&
				(skeletonFrame.SkeletonData[i].SkeletonPositions[NUI_SKELETON_POSITION_HAND_RIGHT].y > skeletonFrame.SkeletonData[i].SkeletonPositions[NUI_SKELETON_POSITION_KNEE_RIGHT].y) &&
				(skeletonFrame.SkeletonData[i].SkeletonPositions[NUI_SKELETON_POSITION_HAND_LEFT].y < skeletonFrame.SkeletonData[i].SkeletonPositions[NUI_SKELETON_POSITION_SHOULDER_LEFT].y) &&
				(skeletonFrame.SkeletonData[i].SkeletonPositions[NUI_SKELETON_POSITION_HAND_RIGHT].y < skeletonFrame.SkeletonData[i].SkeletonPositions[NUI_SKELETON_POSITION_SHOULDER_RIGHT].y))
			{
				// Write "P" in keyboard buffer
				SetStatusMessage(L"both hands are down");
				//keybd_event(0x4F, 0x18, 0, 0);

				unsigned char berndmessage[] = "p";
				port1.sendArray(berndmessage, 1);
				unsigned char heinrichmessage[] = "p";
				port2.sendArray2(heinrichmessage, 1);
			}

        }
        else if (NUI_SKELETON_POSITION_ONLY == trackingState)
        {
            // we've only received the center point of the skeleton, draw that
            D2D1_ELLIPSE ellipse = D2D1::Ellipse(
                SkeletonToScreen(skeletonFrame.SkeletonData[i].Position, width, height),
                g_JointThickness,
                g_JointThickness
                );

            m_pRenderTarget->DrawEllipse(ellipse, m_pBrushJointTracked);
        }
    }



	/*
	// ---> BEGIN second person
	for (int j = 0; j < NUI_SKELETON_COUNT; ++j)
	{
		NUI_SKELETON_TRACKING_STATE trackingState = skeletonFrame.SkeletonData[j].eTrackingState;

		if (NUI_SKELETON_TRACKED == trackingState)
		{
			// We're tracking the skeleton, draw it
			DrawSkeleton(skeletonFrame.SkeletonData[j], width, height);

			// Write commands into keyboard buffer depending on gesture
			if ((skeletonFrame.SkeletonData[j].SkeletonPositions[NUI_SKELETON_POSITION_HAND_RIGHT].y > skeletonFrame.SkeletonData[j].SkeletonPositions[NUI_SKELETON_POSITION_SHOULDER_RIGHT].y) &&
				(skeletonFrame.SkeletonData[j].SkeletonPositions[NUI_SKELETON_POSITION_HAND_LEFT].y < skeletonFrame.SkeletonData[j].SkeletonPositions[NUI_SKELETON_POSITION_SHOULDER_LEFT].y))
			{
				SetStatusMessage(L"second person: right hand is up");
				// Write "K" in keyboard buffer
				keybd_event(0x4B, 0x25, 0, 0); // 1. virtual-key code  2. scan code
			}

			if ((skeletonFrame.SkeletonData[j].SkeletonPositions[NUI_SKELETON_POSITION_HAND_LEFT].y > skeletonFrame.SkeletonData[j].SkeletonPositions[NUI_SKELETON_POSITION_SHOULDER_LEFT].y) &&
				(skeletonFrame.SkeletonData[j].SkeletonPositions[NUI_SKELETON_POSITION_HAND_RIGHT].y < skeletonFrame.SkeletonData[j].SkeletonPositions[NUI_SKELETON_POSITION_SHOULDER_RIGHT].y))
			{
				// Write "H" in keyboard buffer
				SetStatusMessage(L"second person: left hand is up");
				keybd_event(0x48, 0x23, 0, 0);
			}

			if ((skeletonFrame.SkeletonData[j].SkeletonPositions[NUI_SKELETON_POSITION_HAND_LEFT].y > skeletonFrame.SkeletonData[j].SkeletonPositions[NUI_SKELETON_POSITION_SHOULDER_LEFT].y) &&
				(skeletonFrame.SkeletonData[j].SkeletonPositions[NUI_SKELETON_POSITION_HAND_RIGHT].y > skeletonFrame.SkeletonData[j].SkeletonPositions[NUI_SKELETON_POSITION_SHOULDER_RIGHT].y))
			{
				// Write "U" in keyboard buffer
				SetStatusMessage(L"second person: both hands are up");
				keybd_event(0x55, 0x16, 0, 0); // this one drives!
			}

			if ((skeletonFrame.SkeletonData[j].SkeletonPositions[NUI_SKELETON_POSITION_HAND_LEFT].y < skeletonFrame.SkeletonData[j].SkeletonPositions[NUI_SKELETON_POSITION_KNEE_LEFT].y) &&
				(skeletonFrame.SkeletonData[j].SkeletonPositions[NUI_SKELETON_POSITION_HAND_RIGHT].y < skeletonFrame.SkeletonData[j].SkeletonPositions[NUI_SKELETON_POSITION_KNEE_RIGHT].y))
			{
				// Write "J" in keyboard buffer
				SetStatusMessage(L"second person: both hands below the knees");
				keybd_event(0x4A, 0x24, 0, 0);
			}

			if ((skeletonFrame.SkeletonData[j].SkeletonPositions[NUI_SKELETON_POSITION_HAND_LEFT].y > skeletonFrame.SkeletonData[j].SkeletonPositions[NUI_SKELETON_POSITION_KNEE_LEFT].y) &&
				(skeletonFrame.SkeletonData[j].SkeletonPositions[NUI_SKELETON_POSITION_HAND_RIGHT].y > skeletonFrame.SkeletonData[j].SkeletonPositions[NUI_SKELETON_POSITION_KNEE_RIGHT].y) &&
				(skeletonFrame.SkeletonData[j].SkeletonPositions[NUI_SKELETON_POSITION_HAND_LEFT].y < skeletonFrame.SkeletonData[j].SkeletonPositions[NUI_SKELETON_POSITION_SHOULDER_LEFT].y) &&
				(skeletonFrame.SkeletonData[j].SkeletonPositions[NUI_SKELETON_POSITION_HAND_RIGHT].y < skeletonFrame.SkeletonData[j].SkeletonPositions[NUI_SKELETON_POSITION_SHOULDER_RIGHT].y))
			{
				// Write "P" in keyboard buffer
				SetStatusMessage(L"second person: both hands are down");
				keybd_event(0x50, 0x19, 0, 0);
			}

		}
		else if (NUI_SKELETON_POSITION_ONLY == trackingState)
		{
			// we've only received the center point of the skeleton, draw that
			D2D1_ELLIPSE ellipse = D2D1::Ellipse(
				SkeletonToScreen(skeletonFrame.SkeletonData[j].Position, width, height),
				g_JointThickness,
				g_JointThickness
				);

			m_pRenderTarget->DrawEllipse(ellipse, m_pBrushJointTracked);
		}
	}
	// --> END second person
	*/

    hr = m_pRenderTarget->EndDraw();

    // Device lost, need to recreate the render target
    // We'll dispose it now and retry drawing
    if (D2DERR_RECREATE_TARGET == hr)
    {
        hr = S_OK;
        DiscardDirect2DResources();
    }
}
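The gesture branches above transmit a character over both serial ports on every skeleton frame, i.e. roughly 30 times per second while a pose is held. Below is a minimal debouncing sketch that sends a command only when the detected gesture changes; SendCommandOnce and m_lastCommand are assumed additions, while port1.sendArray / port2.sendArray2 are the calls already used above.

void CSkeletonBasics::SendCommandOnce(unsigned char command)
{
	if (command == m_lastCommand)
	{
		return;                     // same gesture as last frame, nothing new to transmit
	}
	m_lastCommand = command;

	unsigned char message[1] = { command };
	port1.sendArray(message, 1);    // first serial port
	port2.sendArray2(message, 1);   // second serial port
}

// Usage inside a gesture branch, e.g. for the "right hand up" case:
//     SendCommandOnce('d');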
Example 6
HRESULT Kinect::initialize()
{
	HRESULT hr;
	bool result;
	//prepare the cwnd
	cWnd.m_hWnd = hWnd;

	//init Direct2D
	D2D1CreateFactory(D2D1_FACTORY_TYPE_MULTI_THREADED, &d2DFactory);

	//init faceTracker
	faceTracker = new FaceTracking(hWnd, d2DFactory, cWnd);

	//the three events that the kinect will throw
	nextDepthFrameEvent = CreateEvent( NULL, TRUE, FALSE, NULL );
	nextColorFrameEvent = CreateEvent( NULL, TRUE, FALSE, NULL );
	nextSkeletonEvent = CreateEvent( NULL, TRUE, FALSE, NULL );

	drawDepth = new ImageDraw();
	result = drawDepth->Initialize( GetDlgItem( hWnd, 1011), d2DFactory, 320, 240, 320 * 4);
	if ( !result )
	{
		// Display error regarding the depth view.
	}

	// NOTE: the color ImageDraw is presumably initialized between these two checks in the
	// full sample; as written, this second test re-checks the same depth result flag.
	if ( !result )
	{
		// Display error regarding the color view.
	}

	// Ensure the Direct2D resources are available
	EnsureDirect2DResources();

	//Flags for the Kinect; they are used in the NuiInitialize call below.
	DWORD nuiFlags = NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX | NUI_INITIALIZE_FLAG_USES_SKELETON | NUI_INITIALIZE_FLAG_USES_COLOR;

	hr = globalNui->NuiInitialize(nuiFlags);
	//If the skeletal part cannot be initialized, just initialize everything else.
	if ( E_NUI_SKELETAL_ENGINE_BUSY == hr)
	{
		nuiFlags = NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX | NUI_INITIALIZE_FLAG_USES_COLOR;
		hr = globalNui->NuiInitialize(nuiFlags);
	}

	if ( FAILED(hr))
	{
		// Report the NuiInitialize error that prevented the Kinect from initializing properly.
	}

	if ( HasSkeletalEngine( globalNui ) )
	{
		hr = globalNui->NuiSkeletonTrackingEnable( nextSkeletonEvent, skeletonTrackingFlags);
		if( FAILED(hr))
		{
			// Error about skeleton tracking.
		}
	}

	// (skeletal viewer error handling would go here)

	//Initialize the color stream
	hr = globalNui->NuiImageStreamOpen(
		NUI_IMAGE_TYPE_COLOR,
		NUI_IMAGE_RESOLUTION_640x480,
		0,
		2,
		nextColorFrameEvent,
		&videoStreamHandle);

	// TODO: add error handling for the color image stream
	if ( FAILED(hr) )
	{

	}

	//Initialize the depth stream.
	hr = globalNui->NuiImageStreamOpen( 
		HasSkeletalEngine(globalNui) ? NUI_IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX : NUI_IMAGE_TYPE_DEPTH,
		NUI_IMAGE_RESOLUTION_320x240,
		depthStreamFlags,
		2,
		nextDepthFrameEvent,
		&depthStreamHandle );

	// If opening the depth stream failed, retry below with default frame flags
	if ( FAILED(hr) )
	{
		hr = globalNui->NuiImageStreamOpen( 
			HasSkeletalEngine(globalNui) ? NUI_IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX : NUI_IMAGE_TYPE_DEPTH,
			NUI_IMAGE_RESOLUTION_320x240,
			NUI_IMAGE_FRAME_FLAG_NONE,
			2,
			nextDepthFrameEvent,
			&depthStreamHandle );
	}
	//Init the mutex (to prevent reading and writing the same object simultaneously)
	mutex = CreateMutex(NULL, FALSE,L"D2DBitMapProtector");

	//FaceTracker Init
	faceTracker->init(mutex);
	// Start the processing thread
	treadNuiProcessStop = CreateEvent( NULL, FALSE, FALSE, NULL );
	treadNuiProcess = CreateThread ( NULL, 0, ProcessThread, this, 0, NULL);
	//start the facetracker thread
	faceTracker->startThread();
	return hr;
}
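Kinect::initialize starts ProcessThread but its body is not included in this listing. A minimal sketch of the usual worker-thread loop from the SDK samples is shown below, waiting on the stop event and the three frame events created above; it assumes ProcessThread is declared as a static member so it can be passed to CreateThread, and the processDepth/processColor/processSkeleton handler names are placeholders, not taken from this code.

DWORD WINAPI Kinect::ProcessThread(LPVOID pParam)
{
	Kinect* pThis = reinterpret_cast<Kinect*>(pParam);

	HANDLE events[] = {
		pThis->treadNuiProcessStop,
		pThis->nextDepthFrameEvent,
		pThis->nextColorFrameEvent,
		pThis->nextSkeletonEvent
	};

	while (true)
	{
		// Wait for the stop signal or for any Kinect stream to produce a frame
		DWORD eventIndex = WaitForMultipleObjects(_countof(events), events, FALSE, 100);

		if (WAIT_OBJECT_0 == eventIndex)
		{
			break;                              // stop event signalled, leave the thread
		}

		// Dispatch to whichever stream signalled; handler names are assumptions
		if (WAIT_OBJECT_0 + 1 == eventIndex) { /* pThis->processDepth();    */ }
		if (WAIT_OBJECT_0 + 2 == eventIndex) { /* pThis->processColor();    */ }
		if (WAIT_OBJECT_0 + 3 == eventIndex) { /* pThis->processSkeleton(); */ }
	}

	return 0;
}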
Example 7
/// <summary>
/// Handle new skeleton data
/// </summary>
void CDataCollection::ProcessSkeleton()
{
	NUI_SKELETON_FRAME skeletonFrame = {0};

	HRESULT hr = m_pNuiSensor->NuiSkeletonGetNextFrame(0, &skeletonFrame);

	
	if(m_bSaveSkeStart)
	{
		int framecount = m_pSkeData->GetFrameSaved();
		::SetDlgItemInt(m_hWnd, static_cast<int>(IDC_FRAME_SHOW),framecount, FALSE );
	}
	
	if ( FAILED(hr) )
	{
		return;
	}

	// smooth out the skeleton data
	m_pNuiSensor->NuiTransformSmooth(&skeletonFrame, NULL);

	//m_pSkeData->AddOneFrame(skeletonFrame);
	for (int i = 0 ; i < NUI_SKELETON_COUNT; i++)
	{
		NUI_SKELETON_TRACKING_STATE trackingState = skeletonFrame.SkeletonData[i].eTrackingState;
		if (NUI_SKELETON_TRACKED ==trackingState)
		{
			m_pSkeData->AddOneFrame(skeletonFrame.SkeletonData[i].SkeletonPositions);
			break;
		}
	}

	if (m_bSaveSkeStart)
	{
		if (m_pSkeData->SavingIsStopped())
		{
			KillTimer(m_hWnd, TIMER_SEC);
			StringCchPrintf(statusmsg, cStatusMessageMaxLen, L"Skeleton video saved to %s", skefname);
			MessageBox(m_hWnd, statusmsg, L"File saved!", MB_OK);

			m_bSaveSkeStart = false;
		}
	}
	// Ensure Direct2D is ready to draw
	hr = EnsureDirect2DResources( );
	if ( FAILED(hr) )
	{
		return;
	}

	m_pRenderTarget->BeginDraw();
	m_pRenderTarget->Clear( );

	RECT rct;
	GetClientRect( GetDlgItem( m_hWnd, IDC_SKELETON ), &rct);
	int width = rct.right;
	int height = rct.bottom;
	//	WCHAR statusMessage[cStatusMessageMaxLen];

	int i;
	//currentframetime=GetTickCount();
	for ( i = 0 ; i < NUI_SKELETON_COUNT; ++i)
	{
		NUI_SKELETON_TRACKING_STATE trackingState = skeletonFrame.SkeletonData[i].eTrackingState;

		if (NUI_SKELETON_TRACKED == trackingState)
		{
			// We're tracking the skeleton, draw it
			DrawSkeleton(skeletonFrame.SkeletonData[i], width, height);
			break;
		}
		else if (NUI_SKELETON_POSITION_ONLY == trackingState)
		{
			// we've only received the center point of the skeleton, draw that
			D2D1_ELLIPSE ellipse = D2D1::Ellipse(
				SkeletonToScreen(skeletonFrame.SkeletonData[i].Position, width, height),
				g_JointThickness,
				g_JointThickness
				);

			m_pRenderTarget->DrawEllipse(ellipse, m_pBrushJointTracked);
		}
	}
	
	
	hr = m_pRenderTarget->EndDraw();

	// Device lost, need to recreate the render target
	// We'll dispose it now and retry drawing
	if (D2DERR_RECREATE_TARGET == hr)
	{
		hr = S_OK;
		DiscardDirect2DResources();
	}
	
}
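CDataCollection::ProcessSkeleton passes NULL to NuiTransformSmooth, which selects the default smoothing parameters. When the data is being recorded, explicit parameters are sometimes preferable; a sketch with illustrative values (not taken from the original code):

	NUI_TRANSFORM_SMOOTH_PARAMETERS smoothParams = {0};
	smoothParams.fSmoothing          = 0.7f;   // more smoothing, more latency
	smoothParams.fCorrection        = 0.3f;   // how quickly the filter corrects toward the raw data
	smoothParams.fPrediction        = 0.4f;   // how far ahead (in frames) to predict
	smoothParams.fJitterRadius      = 0.1f;   // jitter clamp radius, in meters
	smoothParams.fMaxDeviationRadius = 0.1f;  // max allowed deviation of filtered from raw, in meters

	m_pNuiSensor->NuiTransformSmooth(&skeletonFrame, &smoothParams);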
Example 8
/// <summary>
/// Handle new body data
/// <param name="nTime">timestamp of frame</param>
/// <param name="nBodyCount">body data count</param>
/// <param name="ppBodies">body data in frame</param>
/// </summary>
void CBodyBasics::ProcessBody(INT64 nTime, int nBodyCount, IBody** ppBodies)
{
    if (m_hWnd)
    {
        HRESULT hr = EnsureDirect2DResources();

        if (SUCCEEDED(hr) && m_pRenderTarget && m_pCoordinateMapper)
        {
            m_pRenderTarget->BeginDraw();
            m_pRenderTarget->Clear();

            RECT rct;
            GetClientRect(GetDlgItem(m_hWnd, IDC_VIDEOVIEW), &rct);
            int width = rct.right;
            int height = rct.bottom;

            for (int i = 0; i < nBodyCount; ++i)
            {
                IBody* pBody = ppBodies[i];
                if (pBody)
                {
                    BOOLEAN bTracked = false;
                    hr = pBody->get_IsTracked(&bTracked);

                    if (SUCCEEDED(hr) && bTracked)
                    {
                        Joint joints[JointType_Count]; 
                        D2D1_POINT_2F jointPoints[JointType_Count];
                        HandState leftHandState = HandState_Unknown;
                        HandState rightHandState = HandState_Unknown;

                        pBody->get_HandLeftState(&leftHandState);
                        pBody->get_HandRightState(&rightHandState);

                        hr = pBody->GetJoints(_countof(joints), joints);
                        if (SUCCEEDED(hr))
                        {
                            for (int j = 0; j < _countof(joints); ++j)
                            {
                                jointPoints[j] = BodyToScreen(joints[j].Position, width, height);
                            }

                            DrawBody(joints, jointPoints);

                            DrawHand(leftHandState, jointPoints[JointType_HandLeft]);
                            DrawHand(rightHandState, jointPoints[JointType_HandRight]);
                        }
						
                    }
                }
            }

            hr = m_pRenderTarget->EndDraw();

            // Device lost, need to recreate the render target
            // We'll dispose it now and retry drawing
            if (D2DERR_RECREATE_TARGET == hr)
            {
                hr = S_OK;
                DiscardDirect2DResources();
            }
        }
		// Compare the number of lines in speech.txt and body.txt; each missing line in body.txt
		// corresponds to a new speech event, so append a "start" marker for every one of them.
		FILE *fp;
		if ((fp = fopen("C:\\Users\\tuchiyama\\Desktop\\SpeechBasics-D2D\\speech.txt", "r")) != NULL) {
			fclose(fp);
			if ((fp = fopen("body.txt", "r")) != NULL) {
				fclose(fp);
				std::ifstream ifsa("C:\\Users\\tuchiyama\\Desktop\\SpeechBasics-D2D\\speech.txt");
				std::string linea;
				int si = 0;
				while (std::getline(ifsa, linea)) {
					si++;
				}
				std::ifstream ifsb("body.txt");
				std::string lineb;
				int bi = 0;
				while (std::getline(ifsb, lineb)) {
					bi++;
				}
				if (si > bi) {
					isWrite = true;
					std::ofstream ofs("body.txt", std::ios::app);
					for (int j = 0; j < si - bi; j++) {
						ofs << "start" << std::endl;
					}
				}
			}
		}
		// Same check for delete.txt: if new delete events arrived, run the undo script once and
		// record that the events have been handled.
		if ((fp = fopen("C:\\Users\\tuchiyama\\Desktop\\SpeechBasics-D2D\\delete.txt", "r")) != NULL) {
			fclose(fp);
			if ((fp = fopen("delete.txt", "r")) != NULL) {
				fclose(fp);
				std::ifstream ifsa("C:\\Users\\tuchiyama\\Desktop\\SpeechBasics-D2D\\delete.txt");
				std::string linea;
				int si = 0;
				while (std::getline(ifsa, linea)) {
					si++;
				}
				std::ifstream ifsb("delete.txt");
				std::string lineb;
				int bi = 0;
				while (std::getline(ifsb, lineb)) {
					bi++;
				}
				if (si > bi) {
					system("ruby C:\\Users\\tuchiyama\\Documents\\odorimming\\make_html.rb undo");
					std::ofstream ofs("delete.txt", std::ios::app);
					for (int j = 0; j < si - bi; j++) {
						ofs << "delete" << std::endl;
					}
				}
			}
		}

        if (!m_nStartTime)
        {
            m_nStartTime = nTime;
        }

        double fps = 0.0;

        LARGE_INTEGER qpcNow = {0};
        if (m_fFreq)
        {
            if (QueryPerformanceCounter(&qpcNow))
            {
                if (m_nLastCounter)
                {
                    m_nFramesSinceUpdate+=1;
                    fps = m_fFreq * m_nFramesSinceUpdate / double(qpcNow.QuadPart - m_nLastCounter);
                }
            }
        }

        WCHAR szStatusMessage[64];
        StringCchPrintf(szStatusMessage, _countof(szStatusMessage), L" FPS = %0.2f    Time = %I64d", fps, (nTime - m_nStartTime));

        if (SetStatusMessage(szStatusMessage, 1000, false))
        {
            m_nLastCounter = qpcNow.QuadPart ;
            m_nFramesSinceUpdate = 0;
        }
    }
}
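The two file blocks above detect new speech and delete events by comparing the line counts of a source file and a local log file. The intent is easier to see with a small helper; this is an illustrative refactor, not part of the original sample (it assumes <fstream> and <string> are already included, as the code above implies).

static int CountLines(const std::string& path)
{
	std::ifstream ifs(path);
	std::string line;
	int count = 0;
	while (std::getline(ifs, line))
	{
		++count;
	}
	return count;
}

// Usage, mirroring the speech.txt / body.txt comparison above:
//   int newEvents = CountLines("C:\\Users\\tuchiyama\\Desktop\\SpeechBasics-D2D\\speech.txt")
//                 - CountLines("body.txt");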