/// <summary>
/// Detects a sustained "open left hand held in front of the shoulder" pose and,
/// while held, cycles the command index when the thumb is tucked toward the palm.
/// </summary>
/// <param name="pBody">body to inspect; ignored when NULL</param>
void CColorBasics::Detect(IBody* pBody)
{
    if (pBody == NULL)
    {
        return;
    }

    Joint joints[JointType_Count];
    D2D1_POINT_2F pos;
    HandState leftHandState = HandState_Unknown;
    HandState rightHandState = HandState_Unknown;

    pBody->get_HandLeftState(&leftHandState);
    pBody->get_HandRightState(&rightHandState);

    HRESULT hr = pBody->GetJoints(_countof(joints), joints);
    if (SUCCEEDED(hr))
    {
        Joint hand = joints[JointType_HandLeft];
        Joint Shoulder = joints[JointType_ShoulderLeft];

        // Trigger only when the open left hand is held clearly (> 0.4 m)
        // in front of the left shoulder.
        if (leftHandState == HandState_Open && (Shoulder.Position.Z - hand.Position.Z) > 0.4)
        {
            pos = BodyToScreen(hand.Position);
            m_pDrawColor->DrawHand(joints, pos);

            // Require the pose to be held for ~20 consecutive frames before reacting.
            if (++m_nDetectCount > 20)
            {
                // BUGFIX: this declaration was commented out while "thumb" was
                // still referenced below, which does not compile. Restored.
                Joint thumb = joints[JointType_ThumbLeft];

                // Thumb aligned with the palm (tucked) => advance command, wrap 0..3.
                if (fabs(thumb.Position.X - hand.Position.X) < 0.01)
                {
                    m_nCommandIndex++;
                    if (m_nCommandIndex > 3)
                    {
                        m_nCommandIndex = 0;
                    }
                }
                m_pDrawColor->DrawCommand(pos, m_nCommandIndex);
            }
        }
        else
        {
            // Pose broken: restart the hold counter.
            m_nDetectCount = 0;
        }
    }
    return;
}
/// <summary> /// Handle new body data /// <param name="nTime">timestamp of frame</param> /// <param name="nBodyCount">body data count</param> /// <param name="ppBodies">body data in frame</param> /// </summary> void CBodyBasics::ProcessBody(INT64 nTime, int nBodyCount, IBody** ppBodies) { if (m_hWnd) { HRESULT hr = EnsureDirect2DResources(); DetectionResult nEngaged[6] = { DetectionResult_Unknown }; int width = 0; int height = 0; if (SUCCEEDED(hr) && m_pRenderTarget && m_pCoordinateMapper) { m_pRenderTarget->BeginDraw(); m_pRenderTarget->Clear(); RECT rct; GetClientRect(GetDlgItem(m_hWnd, IDC_VIDEOVIEW), &rct); width = rct.right; height = rct.bottom; for (int i = 0; i < nBodyCount; ++i) { nEngaged[i] = DetectionResult_Maybe; IBody* pBody = ppBodies[i]; if (pBody) { BOOLEAN bTracked = false; hr = pBody->get_IsTracked(&bTracked); // Engaged()は使えるみたい。これは、視野に入ってきた人を認識するものだろう。 hr = pBody->get_Engaged( &nEngaged[i] ); // 以下はまだ使えないようだ //hr = pBody->GetAppearanceDetectionResults((UINT)i, &nEngaged[i]); if (SUCCEEDED(hr) && bTracked) { Joint joints[JointType_Count]; D2D1_POINT_2F jointPoints[JointType_Count]; HandState leftHandState = HandState_Unknown; HandState rightHandState = HandState_Unknown; pBody->get_HandLeftState(&leftHandState); pBody->get_HandRightState(&rightHandState); hr = pBody->GetJoints(_countof(joints), joints); if (SUCCEEDED(hr)) { for (int j = 0; j < _countof(joints); ++j) { jointPoints[j] = BodyToScreen(joints[j].Position, width, height); } DrawBody(joints, jointPoints); // ここに頭部に丸を描いて、ボディ番号を表示 DrawHead(jointPoints[JointType_Head], i, nEngaged[i]); DrawHand(leftHandState, jointPoints[JointType_HandLeft]); DrawHand(rightHandState, jointPoints[JointType_HandRight]); } } } } hr = m_pRenderTarget->EndDraw(); // Device lost, need to recreate the render target // We'll dispose it now and retry drawing if (D2DERR_RECREATE_TARGET == hr) { hr = S_OK; DiscardDirect2DResources(); } } if (!m_nStartTime) { m_nStartTime = nTime; } double fps = 0.0; LARGE_INTEGER 
qpcNow = {0}; if (m_fFreq) { if (QueryPerformanceCounter(&qpcNow)) { if (m_nLastCounter) { m_nFramesSinceUpdate++; fps = m_fFreq * m_nFramesSinceUpdate / double(qpcNow.QuadPart - m_nLastCounter); } } } WCHAR szStatusMessage[128] ; StringCchPrintf( szStatusMessage, _countof(szStatusMessage), L" FPS = %0.2f Time = %I64d width:%d height:%d", fps, (nTime - m_nStartTime), width, height); if (SetStatusMessage(szStatusMessage, 1000, false)) { m_nLastCounter = qpcNow.QuadPart; m_nFramesSinceUpdate = 0; } } }
void BreathingClass::ProcessBody(INT64 nTime, int nBodyCount, IBody** ppBodies) { double testVal = 0; if (m_hWnd) { HRESULT hr = EnsureDirect2DResources(); if (SUCCEEDED(hr) && m_pRenderTarget && m_pCoordinateMapper) { m_pRenderTarget->BeginDraw(); m_pRenderTarget->Clear(); RECT rct; GetClientRect(GetDlgItem(m_hWnd, IDC_VIDEOVIEW), &rct); int width = rct.right; int height = rct.bottom; for (int i = 0; i < nBodyCount; ++i) { IBody* pBody = ppBodies[i]; if (pBody) { BOOLEAN bTracked = false; hr = pBody->get_IsTracked(&bTracked); if (SUCCEEDED(hr) && bTracked) { Joint joints[JointType_Count]; D2D1_POINT_2F jointPoints[JointType_Count]; HandState leftHandState = HandState_Unknown; HandState rightHandState = HandState_Unknown; pBody->get_HandLeftState(&leftHandState); pBody->get_HandRightState(&rightHandState); hr = pBody->GetJoints(_countof(joints), joints); if (SUCCEEDED(hr)) { for (int j = 0; j < _countof(joints); ++j) { jointPoints[j] = BodyToScreen(joints[j].Position, width, height); } //DrawHand(leftHandState, jointPoints[JointType_ShoulderLeft]); float shoulderLeft = jointPoints[JointType_ShoulderLeft].y; float shoulderRight = jointPoints[JointType_ShoulderRight].y; float shoulderSpine = jointPoints[JointType_SpineShoulder].y; // average shoulder height testVal = (shoulderLeft + shoulderRight) / 2.0f * 100; BreathUpdate2(testVal); } } } } hr = m_pRenderTarget->EndDraw(); // Device lost, need to recreate the render target // We'll dispose it now and retry drawing if (D2DERR_RECREATE_TARGET == hr) { hr = S_OK; DiscardDirect2DResources(); } } if (!m_nStartTime) { m_nStartTime = nTime; } double fps = 0; if (GetBreathingIn()) fps = 1.0f; else fps = 0.0f; LARGE_INTEGER qpcNow = { 0 }; if (m_fFreq) { if (QueryPerformanceCounter(&qpcNow)) { if (m_nLastCounter) { m_nFramesSinceUpdate++; // fps = m_fFreq * m_nFramesSinceUpdate / double(qpcNow.QuadPart - m_nLastCounter); } } } WCHAR szStatusMessage[64]; //StringCchPrintf(szStatusMessage, _countof(szStatusMessage), L" FPS = 
%0.2f Time = %I64d", fps, (nTime - m_nStartTime)); } }
/// <summary>
/// Handle new depth and color data
/// <param name="nTime">timestamp of frame</param>
/// <param name="pDepthBuffer">pointer to depth frame data</param>
/// <param name="nDepthWidth">width (in pixels) of input depth image data</param>
/// <param name="nDepthHeight">height (in pixels) of input depth image data</param>
/// <param name="pColorBuffer">pointer to color frame data</param>
/// <param name="nColorWidth">width (in pixels) of input color image data</param>
/// <param name="nColorHeight">height (in pixels) of input color image data</param>
/// <param name="pBodyIndexBuffer">pointer to body index frame data</param>
/// <param name="nBodyIndexWidth">width (in pixels) of input body index data</param>
/// <param name="nBodyIndexHeight">height (in pixels) of input body index data</param>
/// </summary>
void CCoordinateMappingBasics::ProcessFrame(INT64 nTime, const UINT16* pDepthBuffer, int nDepthWidth, int nDepthHeight, const RGBQUAD* pColorBuffer, int nColorWidth, int nColorHeight, const BYTE* pBodyIndexBuffer, int nBodyIndexWidth, int nBodyIndexHeight, int nBodyCount, IBody** ppBodies)
{
    // FPS/status update runs first, independent of frame validity.
    if (m_hWnd)
    {
        if (!m_nStartTime)
        {
            m_nStartTime = nTime;
        }

        double fps = 0.0;

        LARGE_INTEGER qpcNow = {0};
        if (m_fFreq)
        {
            if (QueryPerformanceCounter(&qpcNow))
            {
                if (m_nLastCounter)
                {
                    m_nFramesSinceUpdate++;
                    fps = m_fFreq * m_nFramesSinceUpdate / double(qpcNow.QuadPart - m_nLastCounter);
                }
            }
        }

        WCHAR szStatusMessage[64];
        StringCchPrintf(szStatusMessage, _countof(szStatusMessage), L" FPS = %0.2f Time = %I64d", fps, (nTime - m_nStartTime));

        // Counters are only reset when the status bar actually accepted the update.
        if (SetStatusMessage(szStatusMessage, 1000, false))
        {
            m_nLastCounter = qpcNow.QuadPart;
            m_nFramesSinceUpdate = 0;
        }
    }

    // Make sure we've received valid data
    if (m_pCoordinateMapper && m_pColorCoordinates && m_pOutputRGBX && pDepthBuffer && (nDepthWidth == cDepthWidth) && (nDepthHeight == cDepthHeight) && pColorBuffer && (nColorWidth == cColorWidth) && (nColorHeight == cColorHeight) && pBodyIndexBuffer && (nBodyIndexWidth == cDepthWidth) && (nBodyIndexHeight == cDepthHeight))
    {
        // Build the depth-pixel -> color-pixel lookup for this frame.
        HRESULT hr = m_pCoordinateMapper->MapDepthFrameToColorSpace(nDepthWidth * nDepthHeight, (UINT16*)pDepthBuffer,nDepthWidth * nDepthHeight, m_pColorCoordinates);
        if (SUCCEEDED(hr))
        {
            // Fallback background color when no background image is loaded.
            RGBQUAD c_green = {0, 255, 0};

            // loop over pixel of the output
            for (int depthIndex = 0; depthIndex < (nDepthWidth * nDepthHeight); ++depthIndex)
            {
                // default setting source to copy from the background pixel
                const RGBQUAD* pSrc = (m_pBackgroundRGBX) ? (m_pBackgroundRGBX + depthIndex) : &c_green;

                // 0xff means "no body" in the body-index frame.
                BYTE player = pBodyIndexBuffer[depthIndex];

                // if we're tracking a player for the current pixel, draw from the color camera
                if (player != 0xff)
                {
                    // retrieve the depth to color mapping for the current depth pixel
                    ColorSpacePoint colorPoint = m_pColorCoordinates[depthIndex];

                    // make sure the depth pixel maps to a valid point in color space
                    // (round to nearest; the mapper may return out-of-range/NaN points)
                    int colorX = (int)(floor(colorPoint.X + 0.5));
                    int colorY = (int)(floor(colorPoint.Y + 0.5));
                    if ((colorX >= 0) && (colorX < nColorWidth) && (colorY >= 0) && (colorY < nColorHeight))
                    {
                        // calculate index into color array
                        int colorIndex = colorX + (colorY * nColorWidth);
                        // set source for copy to the color pixel
                        pSrc = m_pColorRGBX + colorIndex;
                    }
                }

                // write output
                m_pOutputRGBX[depthIndex] = *pSrc;
            }

            // Draw the data with Direct2D
            m_pDrawCoordinateMapping->Draw(reinterpret_cast<BYTE*>(m_pOutputRGBX), cDepthWidth * cDepthHeight * sizeof(RGBQUAD));

            if (m_bSaveScreenshot)
            {
                WCHAR szScreenshotPath[MAX_PATH];

                // Retrieve the path to My Photos
                GetScreenshotFileName(szScreenshotPath, _countof(szScreenshotPath));

                // Write out the bitmap to disk (32 bpp)
                HRESULT hr = SaveBitmapToFile(reinterpret_cast<BYTE*>(m_pOutputRGBX), nDepthWidth, nDepthHeight, sizeof(RGBQUAD) * 8, szScreenshotPath);

                WCHAR szStatusMessage[64 + MAX_PATH];
                if (SUCCEEDED(hr))
                {
                    // Set the status bar to show where the screenshot was saved
                    StringCchPrintf(szStatusMessage, _countof(szStatusMessage), L"Screenshot saved to %s", szScreenshotPath);
                }
                else
                {
                    StringCchPrintf(szStatusMessage, _countof(szStatusMessage), L"Failed to write screenshot to %s", szScreenshotPath);
                }

                SetStatusMessage(szStatusMessage, 5000, true);

                // toggle off so we don't save a screenshot again next frame
                m_bSaveScreenshot = false;
            }
        }
    }

    // Hit-target ("button") location in screen coordinates for the hand test below.
    D2D1_POINT_2F center;
    center.x = 400.0;
    center.y = 100.0;

    int width = 0;
    int height = 0;

    if (m_pCoordinateMapper)
    {
        RECT rct;
        GetClientRect(GetDlgItem(m_hWnd, IDC_VIDEOVIEW), &rct);
        width = rct.right;
        height = rct.bottom;

        DWORD clipedge = 0;

        for (int i = 0; i < nBodyCount; ++i)
        {
            IBody* pBody = ppBodies[i];
            if (pBody)
            {
                BOOLEAN bTracked = false;
                HRESULT hr = pBody->get_IsTracked(&bTracked);
                // get_Engaged() seems usable: it appears to detect a person entering the field of view.
                //hr = pBody->get_Engaged(&nEngaged[i]);
                // The following does not seem to work yet:
                //hr = pBody->GetAppearanceDetectionResults((UINT)i, &nEngaged[i]);
                pBody->get_ClippedEdges(&clipedge);

                if (SUCCEEDED(hr) && bTracked)
                {
                    Joint joints[JointType_Count];
                    D2D1_POINT_2F jointPoints[JointType_Count];
                    HandState leftHandState = HandState_Unknown;
                    HandState rightHandState = HandState_Unknown;

                    pBody->get_HandLeftState(&leftHandState);
                    pBody->get_HandRightState(&rightHandState);

                    hr = pBody->GetJoints(_countof(joints), joints);
                    if (SUCCEEDED(hr))
                    {
                        for (int j = 0; j < _countof(joints); ++j)
                        {
                            jointPoints[j] = BodyToScreen(joints[j].Position, width, height);
                        }

                        m_pDrawCoordinateMapping->DrawBody(joints, jointPoints);

                        // Draw a circle on the head and show the body number.
                        m_pDrawCoordinateMapping->DrawHead(jointPoints[JointType_Head], i, clipedge/*, nEngaged[i]*/);

                        m_pDrawCoordinateMapping->DrawHand(leftHandState, jointPoints[JointType_HandLeft]);
                        m_pDrawCoordinateMapping->DrawHand(rightHandState, jointPoints[JointType_HandRight]);

                        // Acts like a button: fires when the right hand tip enters the region.
                        // NOTE(review): currently fires for every recognized person; it really
                        // should be limited to the first recognized person only.
                        float xy[2] = { 0.0 };
                        xy[0] = jointPoints[JointType_HandTipRight].x - center.x;
                        xy[1] = jointPoints[JointType_HandTipRight].y - center.y;

                        m_nButton = 0;
                        if (sqrt(xy[0] * xy[0] + xy[1] * xy[1]) < 50.0)
                        {
                            m_nButton = 1;
                        }

                        m_pDrawCoordinateMapping->DrawButton(center, m_nButton);
                    }
                }
            }
        }

        m_pDrawCoordinateMapping->EndDraw();
    }
}
/// <summary> /// Handle new body data /// <param name="nTime">timestamp of frame</param> /// <param name="nBodyCount">body data count</param> /// <param name="ppBodies">body data in frame</param> /// </summary> void testApp::ProcessBody(INT64 nTime, int nBodyCount, IBody** ppBodies) { HRESULT hr; int trackedBodies = 0; if (m_pCoordinateMapper) { //IBody* pBodyToTrack = NULL; //IBody* pBody2ToTrack = NULL; //UINT64 trackingId; //for (int i = 0; i < nBodyCount; ++i) //{ // IBody* pBody = ppBodies[i]; // if (lastBodyTrackingId == NULL || lastBody2TrackingId == NULL) // { // //Init a new body tracking // if (pBody) { // BOOLEAN bTracked = false; // hr = pBody->get_IsTracked(&bTracked); // if (SUCCEEDED(hr) && bTracked) { // ofLogNotice("Body is tracked"); // if(lastBodyTrackingId == NULL) // hr = pBody->get_TrackingId(&lastBodyTrackingId); // else // hr = pBody->get_TrackingId(&lastBody2TrackingId); // if (SUCCEEDED(hr)) { // ofLogNotice("Found body to track"); // pBodyToTrack = pBody; // } // break; // } // } // } // else { // //Some body is already tracked // if (pBody) { // BOOLEAN bTracked = false; // hr = pBody->get_IsTracked(&bTracked); // if (SUCCEEDED(hr) && bTracked) { // pBody->get_TrackingId(&trackingId); // if (trackingId == lastBodyTrackingId) { // pBodyToTrack = pBody; // } // } // } // } //} //if (pBodyToTrack == NULL && lastBodyTrackingId != NULL) { // ofLogNotice("Lost body. 
Allowing new body to step in."); // lastBodyTrackingId = NULL; //Allow new body to step in //} for (int i = 0; i < nBodyCount; ++i) { IBody* pBodyToTrack = ppBodies[i]; if (pBodyToTrack) { BOOLEAN bTracked = false; hr = pBodyToTrack->get_IsTracked(&bTracked); if (SUCCEEDED(hr) && bTracked) { Joint joints[JointType_Count]; ofVec2f jointPoints[JointType_Count]; leftHandStates[i] = HandState_Unknown; rightHandStates[i] = HandState_Unknown; pBodyToTrack->get_HandLeftState(&leftHandStates[i]); pBodyToTrack->get_HandRightState(&rightHandStates[i]); hr = pBodyToTrack->GetJoints(_countof(joints), joints); if (SUCCEEDED(hr)) { for (int j = 0; j < _countof(joints); ++j) { jointPoints[j] = BodyToScreen(joints[j].Position, 1024, 768); } lastChestPositions[trackedBodies] = jointPoints[JointType_Neck]; lastHandPositionLeft[trackedBodies] = jointPoints[JointType_HandLeft]; lastHandPositionRight[trackedBodies] = jointPoints[JointType_HandRight]; pBodyToTrack->get_TrackingId(&lastBodyTrackingIds[trackedBodies]); trackedBodies++; //DrawBody(joints, jointPoints); } } } } //hr = m_pRenderTarget->EndDraw(); // Device lost, need to recreate the render target // We'll dispose it now and retry drawing if (D2DERR_RECREATE_TARGET == hr) { hr = S_OK; //DiscardDirect2DResources(); } } if (!m_nStartTime) { m_nStartTime = nTime; } double fps = 0.0; LARGE_INTEGER qpcNow = { 0 }; if (m_fFreq) { if (QueryPerformanceCounter(&qpcNow)) { if (m_nLastCounter) { m_nFramesSinceUpdate++; fps = m_fFreq * m_nFramesSinceUpdate / double(qpcNow.QuadPart - m_nLastCounter); } } } }
/// <summary> /// Handle new body data /// <param name="nTime">timestamp of frame</param> /// <param name="nBodyCount">body data count</param> /// <param name="ppBodies">body data in frame</param> /// </summary> void CBodyBasics::ProcessBody(INT64 nTime, int nBodyCount, IBody** ppBodies) { //Declare a struct to collect data struct FRAME_RESUME actual_frame; if (m_hWnd) { HRESULT hr = EnsureDirect2DResources(); if (SUCCEEDED(hr) && m_pRenderTarget && m_pCoordinateMapper) { m_pRenderTarget->BeginDraw(); m_pRenderTarget->Clear(); RECT rct; GetClientRect(GetDlgItem(m_hWnd, IDC_VIDEOVIEW), &rct); int width = rct.right; int height = rct.bottom; for (int i = 0; i < nBodyCount; ++i) { IBody* pBody = ppBodies[i]; if (pBody) { BOOLEAN bTracked = false; hr = pBody->get_IsTracked(&bTracked); //If the person is tracked we assign true '1', else we assign false '0' actual_frame.person[i].isTracked = bTracked; //If the person is tracked if (SUCCEEDED(hr) && bTracked) { Joint joints[JointType_Count]; D2D1_POINT_2F jointPoints[JointType_Count]; HandState leftHandState = HandState_Unknown; HandState rightHandState = HandState_Unknown; pBody->get_HandLeftState(&leftHandState); pBody->get_HandRightState(&rightHandState); hr = pBody->GetJoints(_countof(joints), joints); if (SUCCEEDED(hr)) { for (int j = 0; j < _countof(joints); ++j) { jointPoints[j] = BodyToScreen(joints[j].Position, width, height); //If current joint is not tracked if (joints[j].TrackingState == TrackingState_NotTracked) { actual_frame.person[i].body.joint[j].isTracked = 0; actual_frame.person[i].body.joint[j].coordinate3D.X = 0; actual_frame.person[i].body.joint[j].coordinate3D.Y = 0; actual_frame.person[i].body.joint[j].coordinate3D.Z = 0; actual_frame.person[i].body.joint[j].coordinate2D.X = 0; actual_frame.person[i].body.joint[j].coordinate2D.Y = 0; } else { if (joints[j].TrackingState == TrackingState_Tracked) { actual_frame.person[i].body.joint[j].isTracked = 1; } else { if (joints[j].TrackingState == 
TrackingState_Inferred) { actual_frame.person[i].body.joint[j].isTracked = 2; } else { actual_frame.person[i].body.joint[j].isTracked = 3; } } actual_frame.person[i].body.joint[j].coordinate3D.X = joints[j].Position.X; actual_frame.person[i].body.joint[j].coordinate3D.Y = joints[j].Position.Y; actual_frame.person[i].body.joint[j].coordinate3D.Z = joints[j].Position.Z; actual_frame.person[i].body.joint[j].coordinate2D.X = jointPoints[j].x; actual_frame.person[i].body.joint[j].coordinate2D.Y = jointPoints[j].y; } } DrawBody(joints, jointPoints); DrawHand(leftHandState, jointPoints[JointType_HandLeft]); DrawHand(rightHandState, jointPoints[JointType_HandRight]); } //Gesture values //Get body gesture //BodyGestureIdentification(actual_frame.person[i], &actual_frame.person[i].bodyGesture.type, &actual_frame.person[i].bodyGesture.parameter1); BodyPoseIdentification(actual_frame.person[i], &actual_frame.person[i].bodyPose.type); //Get hand gestures actual_frame.person[i].leftHandGesture.type = leftHandState; actual_frame.person[i].leftHandGesture.parameter1 = 0; actual_frame.person[i].rightHandGesture.type = rightHandState; actual_frame.person[i].rightHandGesture.parameter1 = 0; } //If the person is not tracked put all its values in cero else { //All joints go to zero for (int j = 0; j <25; j++) { actual_frame.person[i].body.joint[j].isTracked = 0; actual_frame.person[i].body.joint[j].coordinate3D.X = 0; actual_frame.person[i].body.joint[j].coordinate3D.Y = 0; actual_frame.person[i].body.joint[j].coordinate3D.Z = 0; actual_frame.person[i].body.joint[j].coordinate2D.X = 0; actual_frame.person[i].body.joint[j].coordinate2D.Y = 0; } //All gestures go to zero actual_frame.person[i].bodyPose.type = 0; actual_frame.person[i].bodyGesture.type = 0; actual_frame.person[i].bodyGesture.parameter1 = 0; actual_frame.person[i].leftHandGesture.type = 0; actual_frame.person[i].leftHandGesture.parameter1 = 0; actual_frame.person[i].rightHandGesture.type = 0; 
actual_frame.person[i].rightHandGesture.parameter1 = 0; } } } hr = m_pRenderTarget->EndDraw(); // Device lost, need to recreate the render target // We'll dispose it now and retry drawing if (D2DERR_RECREATE_TARGET == hr) { hr = S_OK; DiscardDirect2DResources(); } } if (!m_nStartTime) { m_nStartTime = nTime; } double fps = 0.0; LARGE_INTEGER qpcNow = {0}; if (m_fFreq) { if (QueryPerformanceCounter(&qpcNow)) { if (m_nLastCounter) { m_nFramesSinceUpdate++; fps = m_fFreq * m_nFramesSinceUpdate / double(qpcNow.QuadPart - m_nLastCounter); } } } WCHAR szStatusMessage[64]; StringCchPrintf(szStatusMessage, _countof(szStatusMessage), L" FPS = %0.2f Time = %I64d", fps, (nTime - m_nStartTime)); if (SetStatusMessage(szStatusMessage, 1000, false)) { m_nLastCounter = qpcNow.QuadPart; m_nFramesSinceUpdate = 0; } } //Collected results are sent to a file write_a_frame_resume_in_file(actual_frame); }
/// <summary> /// Handle new body data /// <param name="nTime">timestamp of frame</param> /// <param name="nBodyCount">body data count</param> /// <param name="ppBodies">body data in frame</param> /// </summary> void CBodyBasics::ProcessBody(INT64 nTime, int nBodyCount, IBody** ppBodies) { if (m_hWnd) { HRESULT hr = EnsureDirect2DResources(); if (SUCCEEDED(hr) && m_pRenderTarget && m_pCoordinateMapper) { m_pRenderTarget->BeginDraw(); m_pRenderTarget->Clear(); RECT rct; GetClientRect(GetDlgItem(m_hWnd, IDC_VIDEOVIEW), &rct); int width = rct.right; int height = rct.bottom; for (int i = 0; i < nBodyCount; ++i) { IBody* pBody = ppBodies[i]; if (pBody) { BOOLEAN bTracked = false; hr = pBody->get_IsTracked(&bTracked); if (SUCCEEDED(hr) && bTracked) { Joint joints[JointType_Count]; D2D1_POINT_2F jointPoints[JointType_Count]; HandState leftHandState = HandState_Unknown; HandState rightHandState = HandState_Unknown; pBody->get_HandLeftState(&leftHandState); pBody->get_HandRightState(&rightHandState); hr = pBody->GetJoints(_countof(joints), joints); if (SUCCEEDED(hr)) { for (int j = 0; j < _countof(joints); ++j) { jointPoints[j] = BodyToScreen(joints[j].Position, width, height); } DrawBody(joints, jointPoints); DrawHand(leftHandState, jointPoints[JointType_HandLeft]); DrawHand(rightHandState, jointPoints[JointType_HandRight]); } } } } hr = m_pRenderTarget->EndDraw(); // Device lost, need to recreate the render target // We'll dispose it now and retry drawing if (D2DERR_RECREATE_TARGET == hr) { hr = S_OK; DiscardDirect2DResources(); } } FILE *fp; if ((fp = fopen("C:\\Users\\tuchiyama\\Desktop\\SpeechBasics-D2D\\speech.txt", "r")) != NULL) { if ((fp = fopen("body.txt", "r")) != NULL) { std::ifstream ifsa("C:\\Users\\tuchiyama\\Desktop\\SpeechBasics-D2D\\speech.txt"); std::string linea; int si = 0; while (std::getline(ifsa, linea)) { si++; } std::ifstream ifsb("body.txt"); std::string lineb; int bi = 0; while (std::getline(ifsb, lineb)) { bi++; } if (si > bi) { isWrite = true; 
std::ofstream ofs("body.txt", std::ios::app); for (int j = 0; j < si - bi; j++) { ofs << "start" << std::endl; } } } } if ((fp = fopen("C:\\Users\\tuchiyama\\Desktop\\SpeechBasics-D2D\\delete.txt", "r")) != NULL) { if ((fp = fopen("delete.txt", "r")) != NULL) { std::ifstream ifsa("C:\\Users\\tuchiyama\\Desktop\\SpeechBasics-D2D\\delete.txt"); std::string linea; int si = 0; while (std::getline(ifsa, linea)) { si++; } std::ifstream ifsb("delete.txt"); std::string lineb; int bi = 0; while (std::getline(ifsb, lineb)) { bi++; } if (si > bi) { system("ruby C:\\Users\\tuchiyama\\Documents\\odorimming\\make_html.rb undo"); std::ofstream ofs("delete.txt", std::ios::app); for (int j = 0; j < si - bi; j++) { ofs << "delete" << std::endl; } } } } if (!m_nStartTime) { m_nStartTime = nTime; } double fps = 0.0; LARGE_INTEGER qpcNow = {0}; if (m_fFreq) { if (QueryPerformanceCounter(&qpcNow)) { if (m_nLastCounter) { m_nFramesSinceUpdate+=1; fps = m_fFreq * m_nFramesSinceUpdate / double(qpcNow.QuadPart - m_nLastCounter); } } } WCHAR szStatusMessage[64]; StringCchPrintf(szStatusMessage, _countof(szStatusMessage), L" FPS = %0.2f Time = %I64d", fps, (nTime - m_nStartTime)); if (SetStatusMessage(szStatusMessage, 1000, false)) { m_nLastCounter = qpcNow.QuadPart ; m_nFramesSinceUpdate = 0; } } }
/// <summary> /// Handle new body data /// <param name="nTime">timestamp of frame</param> /// <param name="nBodyCount">body data count</param> /// <param name="ppBodies">body data in frame</param> /// </summary> void CColorBasics::ProcessBody(INT64 nTime, int nBodyCount, IBody** ppBodies) { if (m_hWnd) { HRESULT hr = S_OK; D2D1_POINT_2F start; start.x = 1500.0; start.y = 800.0; D2D1_POINT_2F quit; quit.x = 300.0; quit.y = 800.0; //int width = 0; //int height = 0; if (SUCCEEDED(hr) && m_pCoordinateMapper) { // 先に実行しているProcessColor()にて行っているのでコメント //hr = m_pDrawColor->BeginDraw(); DetectionResult nEngaged[6] = { DetectionResult_Unknown }; PointF lean; //RECT rct; //GetClientRect(GetDlgItem(m_hWnd, IDC_VIDEOVIEW), &rct); //width = rct.right; //height = rct.bottom; UINT64 nTrackBody = 10; for (int i = 0; i < nBodyCount; ++i) { IBody* pBody = ppBodies[i]; if (pBody) { // 手旗二人での対戦モードを想定してインデックスを取得する。 // 本来はゲーム前に対戦の二人wフィックスしておくべきだろうが。 // // トラッキングされているボディかはちゃんと確かめること。 BOOLEAN bTracked = false; hr = pBody->get_IsTracked(&bTracked); // Engaged()は使えるみたい。これは、視野に入ってきた人を認識するものだろう。 hr = pBody->get_Engaged(&nEngaged[i]); pBody->get_Lean(&lean); // 以下はまだ使えないようだ //hr = pBody->GetAppearanceDetectionResults((UINT)i, &nEngaged[i]); if (SUCCEEDED(hr) && bTracked) { // トラッキングが無効な場合のインデックスは0が返るので使い方に注意!! 
UINT64 nBodyIndex = 0; hr = pBody->get_TrackingId(&nBodyIndex); Joint joints[JointType_Count]; D2D1_POINT_2F jointPoints[JointType_Count]; HandState leftHandState = HandState_Unknown; HandState rightHandState = HandState_Unknown; pBody->get_HandLeftState(&leftHandState); pBody->get_HandRightState(&rightHandState); hr = pBody->GetJoints(_countof(joints), joints); if (SUCCEEDED(hr)) { // スクリーン座標に変換 for (int j = 0; j < _countof(joints); ++j) { jointPoints[j] = BodyToScreen(joints[j].Position); } // ここに頭部に丸を描いて、ボディ番号を表示 m_pDrawColor->DrawHead(jointPoints[JointType_Head], i, nEngaged[i], lean); // 手先がある領域にきたら実行 // ボタンのような // 現状、複数人が認識されても実行するので、本来は最初に認識された一人のみにする必要がある。 float xy[2] = { 0.0 }; if (!m_bSemaphore) { if (m_pSemaphore[0]) { delete m_pSemaphore[0]; m_pSemaphore[0] = NULL; } if (m_pSemaphore[1]) { delete m_pSemaphore[1]; m_pSemaphore[1] = NULL; } m_nButton = 1; xy[0] = jointPoints[JointType_HandTipRight].x - start.x; xy[1] = jointPoints[JointType_HandTipRight].y - start.y; if (sqrt( xy[0]*xy[0] + xy[1]*xy[1] ) < 100.0 ) { if (nTrackBody == 10 || nTrackBody == nBodyIndex) { m_nButton = 0; nTrackBody = nBodyIndex; } } } else { // 手旗スタート // 手旗判定 if (m_pSemaphore[0] == NULL) { m_pSemaphore[0] = new Semaphore( &nBodyIndex ); } else { if (m_pSemaphore[1] == NULL && !m_pSemaphore[0]->ItsMe(&nBodyIndex)) { m_pSemaphore[1] = new Semaphore(&nBodyIndex); } } // カウント // 基本ポーズでのデータ取得 // 手旗本番処理 // 手旗の判定に画像と同等のフレームは必要はないのでは。 // タイマーでBodyフレームを取得し、それで手旗判定を行う。 if (m_pSemaphore[0]) { m_pSemaphore[0]->SetSignalType(&nBodyIndex, jointPoints, m_pDrawColor); } if (m_pSemaphore[1]) { m_pSemaphore[1]->SetSignalType(&nBodyIndex, jointPoints, m_pDrawColor); } //m_pSemaphore[0]->Practice(nTime, jointPoints, m_pDrawColor); // quitボタン処理 m_nButton = 2; // 基本ポーズ用の表示 xy[0] = jointPoints[JointType_HandTipLeft].x - quit.x; xy[1] = jointPoints[JointType_HandTipLeft].y - quit.y; if (sqrt( xy[0]*xy[0] + xy[1]*xy[1] ) < 100.0 ) { if (nTrackBody == 10 || nTrackBody == nBodyIndex) { m_nButton = 0; 
nTrackBody = nBodyIndex; } } } m_pDrawColor->DrawBody(joints, jointPoints); //m_pDrawColor->DrawHand(leftHandState, jointPoints[JointType_HandLeft]); //m_pDrawColor->DrawHand(rightHandState, jointPoints[JointType_HandRight]); Detect(pBody); //break; } } } } if (!m_bSemaphore) { // このボタン処理でウィンドウにメッセージを送っている m_pDrawColor->DrawButton(start, m_nButton); } else { m_pDrawColor->DrawButton(quit, m_nButton); } // 二人対戦モードのお題表示 if (Question(nTime)) { m_pDrawColor->DrawButton(quit, 0); } m_pDrawColor->EndDraw(); } } }