void BreathingClass::ProcessBody(INT64 nTime, int nBodyCount, IBody** ppBodies)
{
    double testVal = 0;
    if (m_hWnd)
    {
        HRESULT hr = EnsureDirect2DResources();

        if (SUCCEEDED(hr) && m_pRenderTarget && m_pCoordinateMapper)
        {
            m_pRenderTarget->BeginDraw();
            m_pRenderTarget->Clear();

            RECT rct;
            GetClientRect(GetDlgItem(m_hWnd, IDC_VIDEOVIEW), &rct);
            int width = rct.right;
            int height = rct.bottom;

            for (int i = 0; i < nBodyCount; ++i)
            {
                IBody* pBody = ppBodies[i];
                if (pBody)
                {
                    BOOLEAN bTracked = false;
                    hr = pBody->get_IsTracked(&bTracked);

                    if (SUCCEEDED(hr) && bTracked)
                    {
                        Joint joints[JointType_Count];
                        D2D1_POINT_2F jointPoints[JointType_Count];
                        HandState leftHandState = HandState_Unknown;
                        HandState rightHandState = HandState_Unknown;

                        pBody->get_HandLeftState(&leftHandState);
                        pBody->get_HandRightState(&rightHandState);

                        hr = pBody->GetJoints(_countof(joints), joints);
                        if (SUCCEEDED(hr))
                        {
                            for (int j = 0; j < _countof(joints); ++j)
                            {
                                jointPoints[j] = BodyToScreen(joints[j].Position, width, height);
                            }

                            //DrawHand(leftHandState, jointPoints[JointType_ShoulderLeft]);

                            float shoulderLeft = jointPoints[JointType_ShoulderLeft].y;
                            float shoulderRight = jointPoints[JointType_ShoulderRight].y;
                            float shoulderSpine = jointPoints[JointType_SpineShoulder].y; // currently unused

                            // Average of the two shoulder heights, scaled by 100, used as the breathing signal
                            testVal = (shoulderLeft + shoulderRight) / 2.0f * 100;
                            BreathUpdate2(testVal);
                        }
                    }
                }
            }

            hr = m_pRenderTarget->EndDraw();

            // Device lost, need to recreate the render target
            // We'll dispose it now and retry drawing
            if (D2DERR_RECREATE_TARGET == hr)
            {
                hr = S_OK;
                DiscardDirect2DResources();
            }
        }

        if (!m_nStartTime)
        {
            m_nStartTime = nTime;
        }

        // fps is repurposed here as a breathing-in flag (1.0 while inhaling)
        double fps = 0;
        if (GetBreathingIn())
            fps = 1.0;
        else
            fps = 0.0;

        LARGE_INTEGER qpcNow = { 0 };
        if (m_fFreq)
        {
            if (QueryPerformanceCounter(&qpcNow))
            {
                if (m_nLastCounter)
                {
                    m_nFramesSinceUpdate++;
                    // fps = m_fFreq * m_nFramesSinceUpdate / double(qpcNow.QuadPart - m_nLastCounter);
                }
            }
        }

        WCHAR szStatusMessage[64];
        //StringCchPrintf(szStatusMessage, _countof(szStatusMessage), L" FPS = %0.2f Time = %I64d", fps, (nTime - m_nStartTime));
    }
}
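// The snippets above and below call BodyToScreen() without defining it. A minimal
// sketch, assuming the behavior of the stock SDK BodyBasics sample: map the
// camera-space joint into depth space, then scale to the target view size
// (cDepthWidth/cDepthHeight are the 512x424 depth frame dimensions in those samples).
D2D1_POINT_2F BreathingClass::BodyToScreen(const CameraSpacePoint& bodyPoint, int width, int height)
{
    // Calculate the body's position on the screen
    DepthSpacePoint depthPoint = { 0 };
    m_pCoordinateMapper->MapCameraPointToDepthSpace(bodyPoint, &depthPoint);

    float screenPointX = static_cast<float>(depthPoint.X * width) / cDepthWidth;
    float screenPointY = static_cast<float>(depthPoint.Y * height) / cDepthHeight;

    return D2D1::Point2F(screenPointX, screenPointY);
}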
/// <summary>
/// Handle new body data
/// <param name="nTime">timestamp of frame</param>
/// <param name="nBodyCount">body data count</param>
/// <param name="ppBodies">body data in frame</param>
/// </summary>
void CBodyBasics::ProcessBody(INT64 nTime, int nBodyCount, IBody** ppBodies)
{
    if (m_hWnd)
    {
        HRESULT hr = EnsureDirect2DResources();
        DetectionResult nEngaged[6] = { DetectionResult_Unknown };
        int width = 0;
        int height = 0;

        if (SUCCEEDED(hr) && m_pRenderTarget && m_pCoordinateMapper)
        {
            m_pRenderTarget->BeginDraw();
            m_pRenderTarget->Clear();

            RECT rct;
            GetClientRect(GetDlgItem(m_hWnd, IDC_VIDEOVIEW), &rct);
            width = rct.right;
            height = rct.bottom;

            for (int i = 0; i < nBodyCount; ++i)
            {
                nEngaged[i] = DetectionResult_Maybe;

                IBody* pBody = ppBodies[i];
                if (pBody)
                {
                    BOOLEAN bTracked = false;
                    hr = pBody->get_IsTracked(&bTracked);

                    // get_Engaged() seems usable; it apparently detects a person entering the field of view.
                    hr = pBody->get_Engaged(&nEngaged[i]);
                    // The following does not appear to work yet
                    //hr = pBody->GetAppearanceDetectionResults((UINT)i, &nEngaged[i]);

                    if (SUCCEEDED(hr) && bTracked)
                    {
                        Joint joints[JointType_Count];
                        D2D1_POINT_2F jointPoints[JointType_Count];
                        HandState leftHandState = HandState_Unknown;
                        HandState rightHandState = HandState_Unknown;

                        pBody->get_HandLeftState(&leftHandState);
                        pBody->get_HandRightState(&rightHandState);

                        hr = pBody->GetJoints(_countof(joints), joints);
                        if (SUCCEEDED(hr))
                        {
                            for (int j = 0; j < _countof(joints); ++j)
                            {
                                jointPoints[j] = BodyToScreen(joints[j].Position, width, height);
                            }

                            DrawBody(joints, jointPoints);

                            // Draw a circle on the head and show the body index
                            DrawHead(jointPoints[JointType_Head], i, nEngaged[i]);

                            DrawHand(leftHandState, jointPoints[JointType_HandLeft]);
                            DrawHand(rightHandState, jointPoints[JointType_HandRight]);
                        }
                    }
                }
            }

            hr = m_pRenderTarget->EndDraw();

            // Device lost, need to recreate the render target
            // We'll dispose it now and retry drawing
            if (D2DERR_RECREATE_TARGET == hr)
            {
                hr = S_OK;
                DiscardDirect2DResources();
            }
        }

        if (!m_nStartTime)
        {
            m_nStartTime = nTime;
        }

        double fps = 0.0;

        LARGE_INTEGER qpcNow = { 0 };
        if (m_fFreq)
        {
            if (QueryPerformanceCounter(&qpcNow))
            {
                if (m_nLastCounter)
                {
                    m_nFramesSinceUpdate++;
                    fps = m_fFreq * m_nFramesSinceUpdate / double(qpcNow.QuadPart - m_nLastCounter);
                }
            }
        }

        WCHAR szStatusMessage[128];
        StringCchPrintf(szStatusMessage, _countof(szStatusMessage),
            L" FPS = %0.2f Time = %I64d width:%d height:%d",
            fps, (nTime - m_nStartTime), width, height);

        if (SetStatusMessage(szStatusMessage, 1000, false))
        {
            m_nLastCounter = qpcNow.QuadPart;
            m_nFramesSinceUpdate = 0;
        }
    }
}
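// DrawHead() above is a custom helper of this project, not an SDK or stock-sample
// call. A minimal sketch of what it could look like, assuming hypothetical
// m_pBrushJointTracked (ID2D1SolidColorBrush*) and m_pTextFormat (IDWriteTextFormat*)
// members already created alongside the render target:
void CBodyBasics::DrawHead(const D2D1_POINT_2F& headPoint, int bodyIndex, DetectionResult engaged)
{
    const float radius = 30.0f;
    D2D1_ELLIPSE ellipse = D2D1::Ellipse(headPoint, radius, radius);
    m_pRenderTarget->DrawEllipse(ellipse, m_pBrushJointTracked, 3.0f);

    // Label the head with the body index; an engaged body could select a different brush
    WCHAR label[8];
    StringCchPrintf(label, _countof(label), L"%d", bodyIndex);
    D2D1_RECT_F layout = D2D1::RectF(headPoint.x - radius, headPoint.y - radius,
                                     headPoint.x + radius, headPoint.y + radius);
    m_pRenderTarget->DrawText(label, static_cast<UINT32>(wcslen(label)),
                              m_pTextFormat, layout, m_pBrushJointTracked);
}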
/// <summary>
/// Handle new body data
/// <param name="nTime">timestamp of frame</param>
/// <param name="nBodyCount">body data count</param>
/// <param name="ppBodies">body data in frame</param>
/// </summary>
void testApp::ProcessBody(INT64 nTime, int nBodyCount, IBody** ppBodies)
{
    HRESULT hr = S_OK; // initialized so the D2DERR_RECREATE_TARGET check below never reads an indeterminate value
    int trackedBodies = 0;
    if (m_pCoordinateMapper)
    {
        //IBody* pBodyToTrack = NULL;
        //IBody* pBody2ToTrack = NULL;
        //UINT64 trackingId;
        //for (int i = 0; i < nBodyCount; ++i)
        //{
        //    IBody* pBody = ppBodies[i];
        //    if (lastBodyTrackingId == NULL || lastBody2TrackingId == NULL)
        //    {
        //        //Init a new body tracking
        //        if (pBody) {
        //            BOOLEAN bTracked = false;
        //            hr = pBody->get_IsTracked(&bTracked);
        //            if (SUCCEEDED(hr) && bTracked) {
        //                ofLogNotice("Body is tracked");
        //                if (lastBodyTrackingId == NULL)
        //                    hr = pBody->get_TrackingId(&lastBodyTrackingId);
        //                else
        //                    hr = pBody->get_TrackingId(&lastBody2TrackingId);
        //                if (SUCCEEDED(hr)) {
        //                    ofLogNotice("Found body to track");
        //                    pBodyToTrack = pBody;
        //                }
        //                break;
        //            }
        //        }
        //    }
        //    else {
        //        //Some body is already tracked
        //        if (pBody) {
        //            BOOLEAN bTracked = false;
        //            hr = pBody->get_IsTracked(&bTracked);
        //            if (SUCCEEDED(hr) && bTracked) {
        //                pBody->get_TrackingId(&trackingId);
        //                if (trackingId == lastBodyTrackingId) {
        //                    pBodyToTrack = pBody;
        //                }
        //            }
        //        }
        //    }
        //}
        //if (pBodyToTrack == NULL && lastBodyTrackingId != NULL) {
        //    ofLogNotice("Lost body. Allowing new body to step in.");
        //    lastBodyTrackingId = NULL; //Allow new body to step in
        //}

        for (int i = 0; i < nBodyCount; ++i)
        {
            IBody* pBodyToTrack = ppBodies[i];
            if (pBodyToTrack)
            {
                BOOLEAN bTracked = false;
                hr = pBodyToTrack->get_IsTracked(&bTracked);

                if (SUCCEEDED(hr) && bTracked)
                {
                    Joint joints[JointType_Count];
                    ofVec2f jointPoints[JointType_Count];
                    leftHandStates[i] = HandState_Unknown;
                    rightHandStates[i] = HandState_Unknown;

                    pBodyToTrack->get_HandLeftState(&leftHandStates[i]);
                    pBodyToTrack->get_HandRightState(&rightHandStates[i]);

                    hr = pBodyToTrack->GetJoints(_countof(joints), joints);
                    if (SUCCEEDED(hr))
                    {
                        for (int j = 0; j < _countof(joints); ++j)
                        {
                            jointPoints[j] = BodyToScreen(joints[j].Position, 1024, 768);
                        }

                        lastChestPositions[trackedBodies] = jointPoints[JointType_Neck];
                        lastHandPositionLeft[trackedBodies] = jointPoints[JointType_HandLeft];
                        lastHandPositionRight[trackedBodies] = jointPoints[JointType_HandRight];
                        pBodyToTrack->get_TrackingId(&lastBodyTrackingIds[trackedBodies]);
                        trackedBodies++;
                        //DrawBody(joints, jointPoints);
                    }
                }
            }
        }

        //hr = m_pRenderTarget->EndDraw();

        // Device lost, need to recreate the render target
        // We'll dispose it now and retry drawing
        if (D2DERR_RECREATE_TARGET == hr)
        {
            hr = S_OK;
            //DiscardDirect2DResources();
        }
    }

    if (!m_nStartTime)
    {
        m_nStartTime = nTime;
    }

    double fps = 0.0;

    LARGE_INTEGER qpcNow = { 0 };
    if (m_fFreq)
    {
        if (QueryPerformanceCounter(&qpcNow))
        {
            if (m_nLastCounter)
            {
                m_nFramesSinceUpdate++;
                fps = m_fFreq * m_nFramesSinceUpdate / double(qpcNow.QuadPart - m_nLastCounter);
            }
        }
    }
}
/// <summary>
/// Handle new depth and color data
/// <param name="nTime">timestamp of frame</param>
/// <param name="pDepthBuffer">pointer to depth frame data</param>
/// <param name="nDepthWidth">width (in pixels) of input depth image data</param>
/// <param name="nDepthHeight">height (in pixels) of input depth image data</param>
/// <param name="pColorBuffer">pointer to color frame data</param>
/// <param name="nColorWidth">width (in pixels) of input color image data</param>
/// <param name="nColorHeight">height (in pixels) of input color image data</param>
/// <param name="pBodyIndexBuffer">pointer to body index frame data</param>
/// <param name="nBodyIndexWidth">width (in pixels) of input body index data</param>
/// <param name="nBodyIndexHeight">height (in pixels) of input body index data</param>
/// </summary>
void CCoordinateMappingBasics::ProcessFrame(INT64 nTime,
    const UINT16* pDepthBuffer, int nDepthWidth, int nDepthHeight,
    const RGBQUAD* pColorBuffer, int nColorWidth, int nColorHeight,
    const BYTE* pBodyIndexBuffer, int nBodyIndexWidth, int nBodyIndexHeight,
    int nBodyCount, IBody** ppBodies)
{
    if (m_hWnd)
    {
        if (!m_nStartTime)
        {
            m_nStartTime = nTime;
        }

        double fps = 0.0;

        LARGE_INTEGER qpcNow = { 0 };
        if (m_fFreq)
        {
            if (QueryPerformanceCounter(&qpcNow))
            {
                if (m_nLastCounter)
                {
                    m_nFramesSinceUpdate++;
                    fps = m_fFreq * m_nFramesSinceUpdate / double(qpcNow.QuadPart - m_nLastCounter);
                }
            }
        }

        WCHAR szStatusMessage[64];
        StringCchPrintf(szStatusMessage, _countof(szStatusMessage), L" FPS = %0.2f Time = %I64d", fps, (nTime - m_nStartTime));

        if (SetStatusMessage(szStatusMessage, 1000, false))
        {
            m_nLastCounter = qpcNow.QuadPart;
            m_nFramesSinceUpdate = 0;
        }
    }

    // Make sure we've received valid data
    if (m_pCoordinateMapper && m_pColorCoordinates && m_pOutputRGBX &&
        pDepthBuffer && (nDepthWidth == cDepthWidth) && (nDepthHeight == cDepthHeight) &&
        pColorBuffer && (nColorWidth == cColorWidth) && (nColorHeight == cColorHeight) &&
        pBodyIndexBuffer && (nBodyIndexWidth == cDepthWidth) && (nBodyIndexHeight == cDepthHeight))
    {
        HRESULT hr = m_pCoordinateMapper->MapDepthFrameToColorSpace(nDepthWidth * nDepthHeight, (UINT16*)pDepthBuffer, nDepthWidth * nDepthHeight, m_pColorCoordinates);
        if (SUCCEEDED(hr))
        {
            RGBQUAD c_green = { 0, 255, 0 }; // RGBQUAD field order is {blue, green, red, reserved}

            // loop over each pixel of the output
            for (int depthIndex = 0; depthIndex < (nDepthWidth * nDepthHeight); ++depthIndex)
            {
                // default: the source to copy from is the background pixel
                const RGBQUAD* pSrc = (m_pBackgroundRGBX) ? (m_pBackgroundRGBX + depthIndex) : &c_green;

                BYTE player = pBodyIndexBuffer[depthIndex];

                // if we're tracking a player for the current pixel, draw from the color camera
                if (player != 0xff)
                {
                    // retrieve the depth-to-color mapping for the current depth pixel
                    ColorSpacePoint colorPoint = m_pColorCoordinates[depthIndex];

                    // make sure the depth pixel maps to a valid point in color space
                    int colorX = (int)(floor(colorPoint.X + 0.5));
                    int colorY = (int)(floor(colorPoint.Y + 0.5));
                    if ((colorX >= 0) && (colorX < nColorWidth) && (colorY >= 0) && (colorY < nColorHeight))
                    {
                        // calculate index into color array
                        int colorIndex = colorX + (colorY * nColorWidth);

                        // set source for copy to the color pixel
                        pSrc = m_pColorRGBX + colorIndex;
                    }
                }

                // write output
                m_pOutputRGBX[depthIndex] = *pSrc;
            }

            // Draw the data with Direct2D
            m_pDrawCoordinateMapping->Draw(reinterpret_cast<BYTE*>(m_pOutputRGBX), cDepthWidth * cDepthHeight * sizeof(RGBQUAD));

            if (m_bSaveScreenshot)
            {
                WCHAR szScreenshotPath[MAX_PATH];

                // Retrieve the path to My Photos
                GetScreenshotFileName(szScreenshotPath, _countof(szScreenshotPath));

                // Write out the bitmap to disk
                HRESULT hr = SaveBitmapToFile(reinterpret_cast<BYTE*>(m_pOutputRGBX), nDepthWidth, nDepthHeight, sizeof(RGBQUAD) * 8, szScreenshotPath);

                WCHAR szStatusMessage[64 + MAX_PATH];
                if (SUCCEEDED(hr))
                {
                    // Set the status bar to show where the screenshot was saved
                    StringCchPrintf(szStatusMessage, _countof(szStatusMessage), L"Screenshot saved to %s", szScreenshotPath);
                }
                else
                {
                    StringCchPrintf(szStatusMessage, _countof(szStatusMessage), L"Failed to write screenshot to %s", szScreenshotPath);
                }

                SetStatusMessage(szStatusMessage, 5000, true);

                // toggle off so we don't save a screenshot again next frame
                m_bSaveScreenshot = false;
            }
        }
    }

    D2D1_POINT_2F center;
    center.x = 400.0;
    center.y = 100.0;

    int width = 0;
    int height = 0;

    if (m_pCoordinateMapper)
    {
        RECT rct;
        GetClientRect(GetDlgItem(m_hWnd, IDC_VIDEOVIEW), &rct);
        width = rct.right;
        height = rct.bottom;

        DWORD clipedge = 0;

        for (int i = 0; i < nBodyCount; ++i)
        {
            IBody* pBody = ppBodies[i];
            if (pBody)
            {
                BOOLEAN bTracked = false;
                HRESULT hr = pBody->get_IsTracked(&bTracked);

                // get_Engaged() seems usable; it apparently detects a person entering the field of view.
                //hr = pBody->get_Engaged(&nEngaged[i]);
                // The following does not appear to work yet
                //hr = pBody->GetAppearanceDetectionResults((UINT)i, &nEngaged[i]);
                pBody->get_ClippedEdges(&clipedge);

                if (SUCCEEDED(hr) && bTracked)
                {
                    Joint joints[JointType_Count];
                    D2D1_POINT_2F jointPoints[JointType_Count];
                    HandState leftHandState = HandState_Unknown;
                    HandState rightHandState = HandState_Unknown;

                    pBody->get_HandLeftState(&leftHandState);
                    pBody->get_HandRightState(&rightHandState);

                    hr = pBody->GetJoints(_countof(joints), joints);
                    if (SUCCEEDED(hr))
                    {
                        for (int j = 0; j < _countof(joints); ++j)
                        {
                            jointPoints[j] = BodyToScreen(joints[j].Position, width, height);
                        }

                        m_pDrawCoordinateMapping->DrawBody(joints, jointPoints);

                        // Draw a circle on the head and show the body index
                        m_pDrawCoordinateMapping->DrawHead(jointPoints[JointType_Head], i, clipedge/*, nEngaged[i]*/);

                        m_pDrawCoordinateMapping->DrawHand(leftHandState, jointPoints[JointType_HandLeft]);
                        m_pDrawCoordinateMapping->DrawHand(rightHandState, jointPoints[JointType_HandRight]);

                        // Fires when the hand tip enters a given region, like a button.
                        // Currently this triggers for every recognized body; really it should
                        // apply only to the first person recognized.
                        float xy[2] = { 0.0 };
                        xy[0] = jointPoints[JointType_HandTipRight].x - center.x;
                        xy[1] = jointPoints[JointType_HandTipRight].y - center.y;

                        m_nButton = 0;
                        if (sqrt(xy[0] * xy[0] + xy[1] * xy[1]) < 50.0)
                        {
                            m_nButton = 1;
                        }
                        m_pDrawCoordinateMapping->DrawButton(center, m_nButton);
                    }
                }
            }
        }

        m_pDrawCoordinateMapping->EndDraw();
    }
}
/// <summary>
/// Handle new body data
/// <param name="nTime">timestamp of frame</param>
/// <param name="nBodyCount">body data count</param>
/// <param name="ppBodies">body data in frame</param>
/// </summary>
void CBodyBasics::ProcessBody(INT64 nTime, int nBodyCount, IBody** ppBodies)
{
    //Declare a struct to collect data
    struct FRAME_RESUME actual_frame;

    if (m_hWnd)
    {
        HRESULT hr = EnsureDirect2DResources();

        if (SUCCEEDED(hr) && m_pRenderTarget && m_pCoordinateMapper)
        {
            m_pRenderTarget->BeginDraw();
            m_pRenderTarget->Clear();

            RECT rct;
            GetClientRect(GetDlgItem(m_hWnd, IDC_VIDEOVIEW), &rct);
            int width = rct.right;
            int height = rct.bottom;

            for (int i = 0; i < nBodyCount; ++i)
            {
                IBody* pBody = ppBodies[i];
                if (pBody)
                {
                    BOOLEAN bTracked = false;
                    hr = pBody->get_IsTracked(&bTracked);

                    //If the person is tracked we store true '1', else false '0'
                    actual_frame.person[i].isTracked = bTracked;

                    //If the person is tracked
                    if (SUCCEEDED(hr) && bTracked)
                    {
                        Joint joints[JointType_Count];
                        D2D1_POINT_2F jointPoints[JointType_Count];
                        HandState leftHandState = HandState_Unknown;
                        HandState rightHandState = HandState_Unknown;

                        pBody->get_HandLeftState(&leftHandState);
                        pBody->get_HandRightState(&rightHandState);

                        hr = pBody->GetJoints(_countof(joints), joints);
                        if (SUCCEEDED(hr))
                        {
                            for (int j = 0; j < _countof(joints); ++j)
                            {
                                jointPoints[j] = BodyToScreen(joints[j].Position, width, height);

                                //If the current joint is not tracked
                                if (joints[j].TrackingState == TrackingState_NotTracked)
                                {
                                    actual_frame.person[i].body.joint[j].isTracked = 0;
                                    actual_frame.person[i].body.joint[j].coordinate3D.X = 0;
                                    actual_frame.person[i].body.joint[j].coordinate3D.Y = 0;
                                    actual_frame.person[i].body.joint[j].coordinate3D.Z = 0;
                                    actual_frame.person[i].body.joint[j].coordinate2D.X = 0;
                                    actual_frame.person[i].body.joint[j].coordinate2D.Y = 0;
                                }
                                else
                                {
                                    //1 = tracked, 2 = inferred, 3 = any other state
                                    if (joints[j].TrackingState == TrackingState_Tracked)
                                    {
                                        actual_frame.person[i].body.joint[j].isTracked = 1;
                                    }
                                    else
                                    {
                                        if (joints[j].TrackingState == TrackingState_Inferred)
                                        {
                                            actual_frame.person[i].body.joint[j].isTracked = 2;
                                        }
                                        else
                                        {
                                            actual_frame.person[i].body.joint[j].isTracked = 3;
                                        }
                                    }
                                    actual_frame.person[i].body.joint[j].coordinate3D.X = joints[j].Position.X;
                                    actual_frame.person[i].body.joint[j].coordinate3D.Y = joints[j].Position.Y;
                                    actual_frame.person[i].body.joint[j].coordinate3D.Z = joints[j].Position.Z;
                                    actual_frame.person[i].body.joint[j].coordinate2D.X = jointPoints[j].x;
                                    actual_frame.person[i].body.joint[j].coordinate2D.Y = jointPoints[j].y;
                                }
                            }

                            DrawBody(joints, jointPoints);
                            DrawHand(leftHandState, jointPoints[JointType_HandLeft]);
                            DrawHand(rightHandState, jointPoints[JointType_HandRight]);
                        }

                        //Gesture values
                        //Get body gesture
                        //BodyGestureIdentification(actual_frame.person[i], &actual_frame.person[i].bodyGesture.type, &actual_frame.person[i].bodyGesture.parameter1);
                        BodyPoseIdentification(actual_frame.person[i], &actual_frame.person[i].bodyPose.type);

                        //Get hand gestures
                        actual_frame.person[i].leftHandGesture.type = leftHandState;
                        actual_frame.person[i].leftHandGesture.parameter1 = 0;
                        actual_frame.person[i].rightHandGesture.type = rightHandState;
                        actual_frame.person[i].rightHandGesture.parameter1 = 0;
                    }
                    //If the person is not tracked, set all of their values to zero
                    else
                    {
                        //All joints go to zero (25 == JointType_Count)
                        for (int j = 0; j < 25; j++)
                        {
                            actual_frame.person[i].body.joint[j].isTracked = 0;
                            actual_frame.person[i].body.joint[j].coordinate3D.X = 0;
                            actual_frame.person[i].body.joint[j].coordinate3D.Y = 0;
                            actual_frame.person[i].body.joint[j].coordinate3D.Z = 0;
                            actual_frame.person[i].body.joint[j].coordinate2D.X = 0;
                            actual_frame.person[i].body.joint[j].coordinate2D.Y = 0;
                        }

                        //All gestures go to zero
                        actual_frame.person[i].bodyPose.type = 0;
                        actual_frame.person[i].bodyGesture.type = 0;
                        actual_frame.person[i].bodyGesture.parameter1 = 0;
                        actual_frame.person[i].leftHandGesture.type = 0;
                        actual_frame.person[i].leftHandGesture.parameter1 = 0;
                        actual_frame.person[i].rightHandGesture.type = 0;
                        actual_frame.person[i].rightHandGesture.parameter1 = 0;
                    }
                }
            }

            hr = m_pRenderTarget->EndDraw();

            // Device lost, need to recreate the render target
            // We'll dispose it now and retry drawing
            if (D2DERR_RECREATE_TARGET == hr)
            {
                hr = S_OK;
                DiscardDirect2DResources();
            }
        }

        if (!m_nStartTime)
        {
            m_nStartTime = nTime;
        }

        double fps = 0.0;

        LARGE_INTEGER qpcNow = { 0 };
        if (m_fFreq)
        {
            if (QueryPerformanceCounter(&qpcNow))
            {
                if (m_nLastCounter)
                {
                    m_nFramesSinceUpdate++;
                    fps = m_fFreq * m_nFramesSinceUpdate / double(qpcNow.QuadPart - m_nLastCounter);
                }
            }
        }

        WCHAR szStatusMessage[64];
        StringCchPrintf(szStatusMessage, _countof(szStatusMessage), L" FPS = %0.2f Time = %I64d", fps, (nTime - m_nStartTime));

        if (SetStatusMessage(szStatusMessage, 1000, false))
        {
            m_nLastCounter = qpcNow.QuadPart;
            m_nFramesSinceUpdate = 0;
        }
    }

    //Collected results are sent to a file
    write_a_frame_resume_in_file(actual_frame);
}
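// FRAME_RESUME is project-specific and not shown in the snippet above. A plausible
// minimal layout, reverse-engineered from the fields the code touches (all names
// beyond those used above are hypothetical):
struct COORD_3D { float X, Y, Z; };
struct COORD_2D { float X, Y; };
struct JOINT_RESUME {
    int isTracked;         // 0 = not tracked, 1 = tracked, 2 = inferred, 3 = other
    COORD_3D coordinate3D; // camera-space position (meters)
    COORD_2D coordinate2D; // screen-space position (pixels)
};
struct BODY_RESUME { JOINT_RESUME joint[JointType_Count]; };
struct GESTURE_RESUME { int type; float parameter1; };
struct PERSON_RESUME {
    int isTracked;
    BODY_RESUME body;
    GESTURE_RESUME bodyPose;
    GESTURE_RESUME bodyGesture;
    GESTURE_RESUME leftHandGesture;
    GESTURE_RESUME rightHandGesture;
};
struct FRAME_RESUME { PERSON_RESUME person[BODY_COUNT]; };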
/// Handle new body data
void CBodyBasics::ProcessBody(int nBodyCount, IBody** ppBodies)
{
    // Records whether each operation succeeded
    HRESULT hr;

    // For each IBody
    for (int i = 0; i < nBodyCount; ++i)
    {
        IBody* pBody = ppBodies[i];
        // pBody is merely a non-null slot; bTracked below says whether a person is actually tracked
        if (pBody)
        {
            BOOLEAN bTracked = false;
            hr = pBody->get_IsTracked(&bTracked);

            if (SUCCEEDED(hr) && bTracked)
            {
                Joint joints[JointType_Count];                 // joint array
                HandState leftHandState = HandState_Unknown;   // left hand state
                HandState rightHandState = HandState_Unknown;  // right hand state

                // Get the left and right hand states
                pBody->get_HandLeftState(&leftHandState);
                pBody->get_HandRightState(&rightHandState);

                // Joint positions in the depth coordinate system
                DepthSpacePoint* depthSpacePosition = new DepthSpacePoint[_countof(joints)];

                // Get the joints
                hr = pBody->GetJoints(_countof(joints), joints);
                if (SUCCEEDED(hr))
                {
                    clear = false;
                    for (int j = 0; j < _countof(joints); ++j)
                    {
                        // Map joint coordinates from camera space (-1..1) to depth space (512x424)
                        m_pCoordinateMapper->MapCameraPointToDepthSpace(joints[j].Position, &depthSpacePosition[j]);
                    }

                    bodyXY[i][0] = depthSpacePosition[JointType_SpineMid].X;
                    bodyXY[i][1] = depthSpacePosition[JointType_SpineMid].Y;
                    angle[i] = atan(joints[JointType_SpineMid].Position.X / joints[JointType_SpineMid].Position.Z);
                    if (0 > bodyXY[i][0] || bodyXY[i][0] > 512 || 0 > bodyXY[i][1] || bodyXY[i][1] > 424)
                    {
                        bodyXY[i][0] = -1;
                        bodyXY[i][1] = -1;
                    }

                    //------------------------ hand states ---------------------------
                    DrawHandState(depthSpacePosition[JointType_HandLeft], leftHandState);
                    DrawHandState(depthSpacePosition[JointType_HandRight], rightHandState);

                    //--------------------------- body -------------------------------
                    DrawBone(joints, depthSpacePosition, JointType_Head, JointType_Neck);
                    DrawBone(joints, depthSpacePosition, JointType_Neck, JointType_SpineShoulder);
                    DrawBone(joints, depthSpacePosition, JointType_SpineShoulder, JointType_SpineMid);
                    DrawBone(joints, depthSpacePosition, JointType_SpineMid, JointType_SpineBase);
                    DrawBone(joints, depthSpacePosition, JointType_SpineShoulder, JointType_ShoulderRight);
                    DrawBone(joints, depthSpacePosition, JointType_SpineShoulder, JointType_ShoulderLeft);
                    DrawBone(joints, depthSpacePosition, JointType_SpineBase, JointType_HipRight);
                    DrawBone(joints, depthSpacePosition, JointType_SpineBase, JointType_HipLeft);

                    //------------------------- Right Arm ----------------------------
                    DrawBone(joints, depthSpacePosition, JointType_ShoulderRight, JointType_ElbowRight);
                    DrawBone(joints, depthSpacePosition, JointType_ElbowRight, JointType_WristRight);
                    DrawBone(joints, depthSpacePosition, JointType_WristRight, JointType_HandRight);
                    DrawBone(joints, depthSpacePosition, JointType_HandRight, JointType_HandTipRight);
                    DrawBone(joints, depthSpacePosition, JointType_WristRight, JointType_ThumbRight);

                    //-------------------------- Left Arm ----------------------------
                    DrawBone(joints, depthSpacePosition, JointType_ShoulderLeft, JointType_ElbowLeft);
                    DrawBone(joints, depthSpacePosition, JointType_ElbowLeft, JointType_WristLeft);
                    DrawBone(joints, depthSpacePosition, JointType_WristLeft, JointType_HandLeft);
                    DrawBone(joints, depthSpacePosition, JointType_HandLeft, JointType_HandTipLeft);
                    DrawBone(joints, depthSpacePosition, JointType_WristLeft, JointType_ThumbLeft);

                    //-------------------------- Right Leg ---------------------------
                    DrawBone(joints, depthSpacePosition, JointType_HipRight, JointType_KneeRight);
                    DrawBone(joints, depthSpacePosition, JointType_KneeRight, JointType_AnkleRight);
                    DrawBone(joints, depthSpacePosition, JointType_AnkleRight, JointType_FootRight);

                    //--------------------------- Left Leg ---------------------------
                    DrawBone(joints, depthSpacePosition, JointType_HipLeft, JointType_KneeLeft);
                    DrawBone(joints, depthSpacePosition, JointType_KneeLeft, JointType_AnkleLeft);
                    DrawBone(joints, depthSpacePosition, JointType_AnkleLeft, JointType_FootLeft);
                }
                delete[] depthSpacePosition;
            }
        }
    }
    cv::imshow("skeletonImg", skeletonImg);
    cv::waitKey(5);
}
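// DrawBone() and DrawHandState() are helpers of this OpenCV-based sample rather than
// SDK calls. A minimal DrawBone sketch consistent with the calls above: draw a line
// on skeletonImg (the cv::Mat member shown via cv::imshow) only when neither endpoint
// joint is untracked.
void CBodyBasics::DrawBone(const Joint* pJoints, const DepthSpacePoint* depthSpacePosition,
                           JointType joint0, JointType joint1)
{
    TrackingState state0 = pJoints[joint0].TrackingState;
    TrackingState state1 = pJoints[joint1].TrackingState;
    if (state0 == TrackingState_NotTracked || state1 == TrackingState_NotTracked)
    {
        return; // skip bones with an untracked endpoint
    }

    cv::Point p0(static_cast<int>(depthSpacePosition[joint0].X),
                 static_cast<int>(depthSpacePosition[joint0].Y));
    cv::Point p1(static_cast<int>(depthSpacePosition[joint1].X),
                 static_cast<int>(depthSpacePosition[joint1].Y));
    cv::line(skeletonImg, p0, p1, cv::Scalar(0, 255, 0), 2);
}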
void KinectV2Module::processBody(int nBodyCount, IBody** ppBodies)
{
    IBody* pBody = nullptr;

    // Prefer the body we were already following, if it is still tracked
    if (curBodyIndex != -1)
    {
        IBody* b = ppBodies[curBodyIndex];
        if (b)
        {
            BOOLEAN t;
            HRESULT hr = b->get_IsTracked(&t);
            if (SUCCEEDED(hr) && t) pBody = b;
        }
    }

    // Otherwise pick the first tracked body
    if (pBody == nullptr)
    {
        for (int i = 0; i < nBodyCount; i++)
        {
            IBody* b = ppBodies[i];
            if (!b) continue;
            BOOLEAN t;
            HRESULT hr = b->get_IsTracked(&t);
            if (SUCCEEDED(hr) && t)
            {
                pBody = b;
                curBodyIndex = i;
                break;
            }
        }
    }

    if (pBody == nullptr)
    {
        curBodyIndex = -1;
        return;
    }

    Joint joints[JointType_Count];
    HandState leftHandState = HandState_Unknown;
    HandState rightHandState = HandState_Unknown;
    pBody->get_HandLeftState(&leftHandState);
    pBody->get_HandRightState(&rightHandState);
    pBody->GetJoints(_countof(joints), joints);

    Vector3D<float> leftHandPos = Vector3D<float>(joints[JointType_HandLeft].Position.X, joints[JointType_HandLeft].Position.Y, joints[JointType_HandLeft].Position.Z);
    Vector3D<float> rightHandPos = Vector3D<float>(joints[JointType_HandRight].Position.X, joints[JointType_HandRight].Position.Y, joints[JointType_HandRight].Position.Z);
    Point<float> left2D = Point<float>(leftHandPos.x, leftHandPos.y);
    Point<float> right2D = Point<float>(rightHandPos.x, rightHandPos.y);

    leftHandX->setValue(leftHandPos.x);
    leftHandY->setValue(leftHandPos.y);
    rightHandX->setValue(rightHandPos.x);
    rightHandY->setValue(rightHandPos.y);

    // Note: lengthSquared() means this parameter actually holds the squared distance
    handsDistance->setValue((rightHandPos - leftHandPos).lengthSquared());
    handsAngle->setValue(radiansToDegrees(left2D.getAngleToPoint(right2D)) + 180);
    leftHandOpen->setValue(leftHandState == HandState_Open);
    rightHandOpen->setValue(rightHandState == HandState_Open);
}
int main()
{
    printf("Hello, welcome to the Kinect world!\n");

    IKinectSensor* bb;                        // Sensor pointer
    HRESULT hr = GetDefaultKinectSensor(&bb); // Get the default sensor
    if (FAILED(hr))
    {
        printf("No Kinect connected to your PC!\n");
        goto endstop;
    }

    BOOLEAN bIsOpen = 0;
    bb->get_IsOpen(&bIsOpen); // Check whether it is already open
    printf("bIsOpen: %d\n", bIsOpen);

    if (!bIsOpen) // Not open yet, so try to open it
    {
        hr = bb->Open();
        if (FAILED(hr))
        {
            printf("Kinect Open Failed!\n");
            goto endstop;
        }
        printf("Kinect opened! But it needs some time to start working!\n");
        // Be sure to wait a while here, otherwise the checks below all report wrong results
        printf("Wait For 3000 ms...\n");
        Sleep(3000);
    }

    bIsOpen = 0;
    bb->get_IsOpen(&bIsOpen); // Open now?
    printf("bIsOpen: %d\n", bIsOpen);

    BOOLEAN bAvaliable = 0;
    bb->get_IsAvailable(&bAvaliable); // Available?
    printf("bAvaliable: %d\n", bAvaliable);

    DWORD dwCapability = 0;
    bb->get_KinectCapabilities(&dwCapability); // Query capabilities
    printf("dwCapability: %lu\n", dwCapability);

    WCHAR bbuid[256] = { 0 };
    bb->get_UniqueKinectId(256, bbuid); // Get the unique ID
    printf("UID: %ls\n", bbuid); // %ls: bbuid is a wide string

    // Audio data acquisition (not shown)

    // Get body data
    IBodyFrameSource* bodys = nullptr;
    bb->get_BodyFrameSource(&bodys); // Body frame source
    INT32 nBodyNum = 0;
    bodys->get_BodyCount(&nBodyNum); // Body count; not very useful, it is always 6
    printf("Body Num: %d\n", nBodyNum);

    IBodyFrameReader* bodyr = nullptr;
    bodys->OpenReader(&bodyr); // Open a reader for body data

    while (true)
    {
        IBodyFrame* bodyf = nullptr;
        bodyr->AcquireLatestFrame(&bodyf); // Acquire the most recent frame
        if (!bodyf)
        {
            Sleep(100);
            printf(".");
            continue;
        }

        IBody* ppBodies[BODY_COUNT] = { 0 };
        bodyf->GetAndRefreshBodyData(BODY_COUNT, ppBodies); // Refresh body data for everyone

        for (int i = 0; i < BODY_COUNT; ++i)
        {
            IBody* pBody = ppBodies[i]; // Go through each person's data
            if (pBody)
            {
                BOOLEAN bTracked = false;
                hr = pBody->get_IsTracked(&bTracked); // Is this person being tracked, i.e. is someone there?

                if (bTracked)
                {
                    Joint joints[JointType_Count];
                    HandState leftHandState = HandState_Unknown;
                    HandState rightHandState = HandState_Unknown;

                    pBody->get_HandLeftState(&leftHandState);   // Get the left hand state
                    pBody->get_HandRightState(&rightHandState); // Get the right hand state

                    hr = pBody->GetJoints(_countof(joints), joints); // Get the 25 skeleton joints
                    printf("Person %d : Joints[0].X %.2f\n", i, joints[0].Position.X); // Print a simple readout
                }
            }
        }

        for (int i = 0; i < BODY_COUNT; ++i)
        {
            if (ppBodies[i]) ppBodies[i]->Release(); // null-check before releasing
        }
        bodyf->Release();
    }

endclose:
    bb->Close();
endstop:
    system("pause");
    return 0;
}
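// The manual Release() loop above is what the SDK samples usually wrap in a
// SafeRelease helper (also used by the ofxKinectForWindows2 snippet further down):
// a null-checked Release that clears the pointer afterwards.
template<class Interface>
inline void SafeRelease(Interface*& pInterfaceToRelease)
{
    if (pInterfaceToRelease != nullptr)
    {
        pInterfaceToRelease->Release();
        pInterfaceToRelease = nullptr;
    }
}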
void KinectPlugin::ProcessBody(INT64 time, int bodyCount, IBody** bodies)
{
    bool foundOneBody = false;
    if (_coordinateMapper)
    {
        for (int i = 0; i < bodyCount; ++i)
        {
            if (foundOneBody)
            {
                break;
            }

            IBody* body = bodies[i];
            if (body)
            {
                BOOLEAN tracked = false;
                HRESULT hr = body->get_IsTracked(&tracked);

                if (SUCCEEDED(hr) && tracked)
                {
                    foundOneBody = true;

                    if (_joints.size() != JointType_Count)
                    {
                        _joints.resize(JointType_Count, { { 0.0f, 0.0f, 0.0f }, { 0.0f, 0.0f, 0.0f, 0.0f } });
                    }

                    Joint joints[JointType_Count];
                    JointOrientation jointOrientations[JointType_Count];
                    HandState leftHandState = HandState_Unknown;
                    HandState rightHandState = HandState_Unknown;
                    body->get_HandLeftState(&leftHandState);
                    body->get_HandRightState(&rightHandState);

                    // Note: hr is overwritten by the second call, so only the orientations result is checked below
                    hr = body->GetJoints(_countof(joints), joints);
                    hr = body->GetJointOrientations(_countof(jointOrientations), jointOrientations);

                    if (SUCCEEDED(hr))
                    {
                        auto jointCount = _countof(joints);
                        //qDebug() << __FUNCTION__ << "nBodyCount:" << nBodyCount << "body:" << i << "jointCount:" << jointCount;

                        for (int j = 0; j < jointCount; ++j)
                        {
                            //QString jointName = kinectJointNames[joints[j].JointType];

                            glm::vec3 jointPosition { joints[j].Position.X,
                                                      joints[j].Position.Y,
                                                      joints[j].Position.Z };

                            // Kinect documentation is unclear on what these orientations are: are they absolute,
                            // or relative to the parent bones? It appears to have changed between the
                            // older 1.x SDK and the 2.0 SDK.
                            //
                            // https://social.msdn.microsoft.com/Forums/en-US/31c9aff6-7dab-433d-9af9-59942dfd3d69/kinect-v20-preview-sdk-jointorientation-vs-boneorientation?forum=kinectv2sdk
                            // seems to suggest these are absolute...
                            //   "These quaternions are absolute, so you can take a mesh in local space, transform it by the quaternion,
                            //    and it will match the exact orientation of the bone. If you want a relative orientation quaternion, you
                            //    can multiply the absolute quaternion by the inverse of the parent joint's quaternion."
                            //
                            //   - Bone direction (Y, green) - always matches the skeleton.
                            //   - Normal (Z, blue) - joint roll, perpendicular to the bone
                            //   - Binormal (X, orange) - perpendicular to the bone and normal

                            // Careful: glm::quat's constructor order is (w, x, y, z), so passing x first likely
                            // scrambles the components; harmless here only because the assignment below is commented out.
                            glm::quat jointOrientation { jointOrientations[j].Orientation.x,
                                                         jointOrientations[j].Orientation.y,
                                                         jointOrientations[j].Orientation.z,
                                                         jointOrientations[j].Orientation.w };

                            // filling in the _joints data...
                            if (joints[j].TrackingState != TrackingState_NotTracked)
                            {
                                _joints[j].position = jointPosition;
                                //_joints[j].orientation = jointOrientation;
                            }
                        }
                    }
                }
            }
        }
    }
}
void CBodyBasics::TransmitBody(INT64 nTime, int nBodyCount, IBody** ppBodies)
{
    // UDP
    Message msg;
    PacketWriter pw;
    bool ok;
    HRESULT hr;

    for (int cptr = 0; cptr < clients.size(); cptr++)
    {
        if (clients[cptr].active == 1)
        {
            //printFucker("sending to client " + clients[cptr].address + ": " + std::to_string(nBodyCount) + " bodies!\n");

            // SEND FRAME START OVER UDP
            msg.init("/beginFrame");
            msg.pushInt32(nBodyCount);
            pw.init();
            pw.startBundle().startBundle().addMessage(msg).endBundle().endBundle();
            ok = clients[cptr].socket.sendPacket(pw.packetData(), pw.packetSize());

            for (int i = 0; i < nBodyCount; ++i)
            {
                IBody* pBody = ppBodies[i];
                if (pBody)
                {
                    BOOLEAN bTracked = false;
                    hr = pBody->get_IsTracked(&bTracked);

                    if (SUCCEEDED(hr) && bTracked)
                    {
                        Joint joints[JointType_Count];
                        D2D1_POINT_2F jointPoints[JointType_Count];
                        HandState leftHandState = HandState_Unknown;
                        HandState rightHandState = HandState_Unknown;

                        pBody->get_HandLeftState(&leftHandState);
                        pBody->get_HandRightState(&rightHandState);

                        hr = pBody->GetJoints(_countof(joints), joints);
                        if (SUCCEEDED(hr))
                        {
                            msg.init("/beginBody");
                            msg.pushInt32(i);
                            pw.init();
                            pw.startBundle().startBundle().addMessage(msg).endBundle().endBundle();
                            ok = clients[cptr].socket.sendPacket(pw.packetData(), pw.packetSize());

                            for (int j = 0; j < _countof(joints); ++j)
                            {
                                // /kinect body joint x y z
                                msg.init("/bodyJoint");
                                msg.pushInt32(i);
                                msg.pushInt32(j);

                                // body relative - joints[1] is SpineMid, which maps to Torso in OpenNI
                                msg.pushFloat(joints[j].Position.X - joints[1].Position.X);
                                msg.pushFloat(joints[j].Position.Y - joints[1].Position.Y);
                                msg.pushFloat(joints[j].Position.Z - joints[1].Position.Z);

                                // send message
                                pw.init();
                                pw.startBundle().startBundle().addMessage(msg).endBundle().endBundle();
                                ok = clients[cptr].socket.sendPacket(pw.packetData(), pw.packetSize());
                            }

                            msg.init("/endBody");
                            msg.pushInt32(i);
                            pw.init();
                            pw.startBundle().startBundle().addMessage(msg).endBundle().endBundle();
                            ok = clients[cptr].socket.sendPacket(pw.packetData(), pw.packetSize());
                        }
                    }
                }
            }

            // SEND FRAME END OVER UDP
            msg.init("/endFrame");
            pw.init();
            pw.startBundle().startBundle().addMessage(msg).endBundle().endBundle();
            ok = clients[cptr].socket.sendPacket(pw.packetData(), pw.packetSize());
        }
    }
}
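// The sender above appears to use the oscpkt library (Message, PacketWriter, and a
// UdpSocket-style client). For reference, a minimal sketch of a matching receive
// side under that assumption; the /bodyJoint payload is (body, joint, x, y, z) with
// positions relative to SpineMid:
#include "oscpkt.hh"
#include "udp.hh"

void receiveBodies(int port)
{
    oscpkt::UdpSocket sock;
    sock.bindTo(port);
    while (sock.isOk())
    {
        if (sock.receiveNextPacket(30 /* timeout, ms */))
        {
            oscpkt::PacketReader pr(sock.packetData(), sock.packetSize());
            oscpkt::Message* msg;
            while (pr.isOk() && (msg = pr.popMessage()) != nullptr)
            {
                int body, joint;
                float x, y, z;
                if (msg->match("/bodyJoint").popInt32(body).popInt32(joint)
                        .popFloat(x).popFloat(y).popFloat(z).isOkNoMoreArgs())
                {
                    // handle one joint of one body here
                }
            }
        }
    }
}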
//----------
void Body::update() {
    CHECK_OPEN

    IBodyFrame* frame = NULL;
    IFrameDescription* frameDescription = NULL;
    try {
        // acquire frame
        if (FAILED(this->reader->AcquireLatestFrame(&frame))) {
            return; // we often throw here when no new frame is available
        }

        INT64 nTime = 0;
        if (FAILED(frame->get_RelativeTime(&nTime))) {
            throw Exception("Failed to get relative time");
        }

        if (FAILED(frame->get_FloorClipPlane(&floorClipPlane))) {
            throw(Exception("Failed to get floor clip plane"));
        }

        IBody* ppBodies[BODY_COUNT] = { 0 };
        if (FAILED(frame->GetAndRefreshBodyData(_countof(ppBodies), ppBodies))) {
            throw Exception("Failed to refresh body data");
        }

        for (int i = 0; i < BODY_COUNT; ++i) {
            auto& body = bodies[i];
            body.clear();

            IBody* pBody = ppBodies[i];
            if (pBody) {
                BOOLEAN bTracked = false;
                if (FAILED(pBody->get_IsTracked(&bTracked))) {
                    throw Exception("Failed to get tracking status");
                }
                body.tracked = bTracked;
                body.bodyId = i;

                if (bTracked) {
                    // retrieve tracking id
                    UINT64 trackingId = -1;
                    if (FAILED(pBody->get_TrackingId(&trackingId))) {
                        throw Exception("Failed to get tracking id");
                    }
                    body.trackingId = trackingId;

                    // retrieve joint positions & orientations
                    _Joint joints[JointType_Count];
                    _JointOrientation jointsOrient[JointType_Count];
                    if (FAILED(pBody->GetJoints(JointType_Count, joints))) {
                        throw Exception("Failed to get joints");
                    }
                    if (FAILED(pBody->GetJointOrientations(JointType_Count, jointsOrient))) {
                        throw Exception("Failed to get joints orientation");
                    }
                    for (int j = 0; j < JointType_Count; ++j) {
                        body.joints[joints[j].JointType] = Data::Joint(joints[j], jointsOrient[j]);
                    }

                    // retrieve hand states
                    HandState leftHandState = HandState_Unknown;
                    HandState rightHandState = HandState_Unknown;
                    if (FAILED(pBody->get_HandLeftState(&leftHandState))) {
                        throw Exception("Failed to get left hand state");
                    }
                    if (FAILED(pBody->get_HandRightState(&rightHandState))) {
                        throw Exception("Failed to get right hand state");
                    }
                    body.leftHandState = leftHandState;
                    body.rightHandState = rightHandState;
                }
            }
        }

        for (int i = 0; i < _countof(ppBodies); ++i) {
            SafeRelease(ppBodies[i]);
        }
    } catch (std::exception& e) {
        OFXKINECTFORWINDOWS2_ERROR << e.what();
    }

    SafeRelease(frameDescription);
    SafeRelease(frame);
}
/// <summary>
/// Handle new body data
/// <param name="nTime">timestamp of frame</param>
/// <param name="nBodyCount">body data count</param>
/// <param name="ppBodies">body data in frame</param>
/// </summary>
void CBodyBasics::ProcessBody(INT64 nTime, int nBodyCount, IBody** ppBodies)
{
    if (m_hWnd)
    {
        HRESULT hr = EnsureDirect2DResources();

        if (SUCCEEDED(hr) && m_pRenderTarget && m_pCoordinateMapper)
        {
            m_pRenderTarget->BeginDraw();
            m_pRenderTarget->Clear();

            RECT rct;
            GetClientRect(GetDlgItem(m_hWnd, IDC_VIDEOVIEW), &rct);
            int width = rct.right;
            int height = rct.bottom;

            for (int i = 0; i < nBodyCount; ++i)
            {
                IBody* pBody = ppBodies[i];
                if (pBody)
                {
                    BOOLEAN bTracked = false;
                    hr = pBody->get_IsTracked(&bTracked);

                    if (SUCCEEDED(hr) && bTracked)
                    {
                        Joint joints[JointType_Count];
                        D2D1_POINT_2F jointPoints[JointType_Count];
                        HandState leftHandState = HandState_Unknown;
                        HandState rightHandState = HandState_Unknown;

                        pBody->get_HandLeftState(&leftHandState);
                        pBody->get_HandRightState(&rightHandState);

                        hr = pBody->GetJoints(_countof(joints), joints);
                        if (SUCCEEDED(hr))
                        {
                            for (int j = 0; j < _countof(joints); ++j)
                            {
                                jointPoints[j] = BodyToScreen(joints[j].Position, width, height);
                            }

                            DrawBody(joints, jointPoints);
                            DrawHand(leftHandState, jointPoints[JointType_HandLeft]);
                            DrawHand(rightHandState, jointPoints[JointType_HandRight]);
                        }
                    }
                }
            }

            hr = m_pRenderTarget->EndDraw();

            // Device lost, need to recreate the render target
            // We'll dispose it now and retry drawing
            if (D2DERR_RECREATE_TARGET == hr)
            {
                hr = S_OK;
                DiscardDirect2DResources();
            }
        }

        // Poll the files written by the speech app: when speech.txt has more lines
        // than body.txt, append "start" lines to catch up.
        FILE* fp;
        if ((fp = fopen("C:\\Users\\tuchiyama\\Desktop\\SpeechBasics-D2D\\speech.txt", "r")) != NULL)
        {
            fclose(fp); // opened only as an existence check
            if ((fp = fopen("body.txt", "r")) != NULL)
            {
                fclose(fp);

                std::ifstream ifsa("C:\\Users\\tuchiyama\\Desktop\\SpeechBasics-D2D\\speech.txt");
                std::string linea;
                int si = 0;
                while (std::getline(ifsa, linea)) { si++; }

                std::ifstream ifsb("body.txt");
                std::string lineb;
                int bi = 0;
                while (std::getline(ifsb, lineb)) { bi++; }

                if (si > bi)
                {
                    isWrite = true;
                    std::ofstream ofs("body.txt", std::ios::app);
                    for (int j = 0; j < si - bi; j++)
                    {
                        ofs << "start" << std::endl;
                    }
                }
            }
        }

        // Same pattern for delete.txt: run the undo script once per new line.
        if ((fp = fopen("C:\\Users\\tuchiyama\\Desktop\\SpeechBasics-D2D\\delete.txt", "r")) != NULL)
        {
            fclose(fp);
            if ((fp = fopen("delete.txt", "r")) != NULL)
            {
                fclose(fp);

                std::ifstream ifsa("C:\\Users\\tuchiyama\\Desktop\\SpeechBasics-D2D\\delete.txt");
                std::string linea;
                int si = 0;
                while (std::getline(ifsa, linea)) { si++; }

                std::ifstream ifsb("delete.txt");
                std::string lineb;
                int bi = 0;
                while (std::getline(ifsb, lineb)) { bi++; }

                if (si > bi)
                {
                    system("ruby C:\\Users\\tuchiyama\\Documents\\odorimming\\make_html.rb undo");
                    std::ofstream ofs("delete.txt", std::ios::app);
                    for (int j = 0; j < si - bi; j++)
                    {
                        ofs << "delete" << std::endl;
                    }
                }
            }
        }

        if (!m_nStartTime)
        {
            m_nStartTime = nTime;
        }

        double fps = 0.0;

        LARGE_INTEGER qpcNow = { 0 };
        if (m_fFreq)
        {
            if (QueryPerformanceCounter(&qpcNow))
            {
                if (m_nLastCounter)
                {
                    m_nFramesSinceUpdate += 1;
                    fps = m_fFreq * m_nFramesSinceUpdate / double(qpcNow.QuadPart - m_nLastCounter);
                }
            }
        }

        WCHAR szStatusMessage[64];
        StringCchPrintf(szStatusMessage, _countof(szStatusMessage), L" FPS = %0.2f Time = %I64d", fps, (nTime - m_nStartTime));

        if (SetStatusMessage(szStatusMessage, 1000, false))
        {
            m_nLastCounter = qpcNow.QuadPart;
            m_nFramesSinceUpdate = 0;
        }
    }
}
/// <summary>
/// Handle new body data
/// <param name="nTime">timestamp of frame</param>
/// <param name="nBodyCount">body data count</param>
/// <param name="ppBodies">body data in frame</param>
/// </summary>
void CColorBasics::ProcessBody(INT64 nTime, int nBodyCount, IBody** ppBodies)
{
    if (m_hWnd)
    {
        HRESULT hr = S_OK;

        D2D1_POINT_2F start;
        start.x = 1500.0;
        start.y = 800.0;

        D2D1_POINT_2F quit;
        quit.x = 300.0;
        quit.y = 800.0;

        //int width = 0;
        //int height = 0;

        if (SUCCEEDED(hr) && m_pCoordinateMapper)
        {
            // Commented out because ProcessColor(), which runs first, already does this
            //hr = m_pDrawColor->BeginDraw();

            DetectionResult nEngaged[6] = { DetectionResult_Unknown };
            PointF lean;

            //RECT rct;
            //GetClientRect(GetDlgItem(m_hWnd, IDC_VIDEOVIEW), &rct);
            //width = rct.right;
            //height = rct.bottom;

            UINT64 nTrackBody = 10;

            for (int i = 0; i < nBodyCount; ++i)
            {
                IBody* pBody = ppBodies[i];
                if (pBody)
                {
                    // Get the index on the assumption of a two-player flag-semaphore match mode.
                    // Properly, the two contestants should be fixed before the game starts.
                    //
                    // Make sure the body is actually being tracked.
                    BOOLEAN bTracked = false;
                    hr = pBody->get_IsTracked(&bTracked);

                    // get_Engaged() seems usable; it apparently detects a person entering the field of view.
                    hr = pBody->get_Engaged(&nEngaged[i]);
                    pBody->get_Lean(&lean);
                    // The following does not appear to work yet
                    //hr = pBody->GetAppearanceDetectionResults((UINT)i, &nEngaged[i]);

                    if (SUCCEEDED(hr) && bTracked)
                    {
                        // Careful: when tracking is invalid, the returned index is 0!!
                        UINT64 nBodyIndex = 0;
                        hr = pBody->get_TrackingId(&nBodyIndex);

                        Joint joints[JointType_Count];
                        D2D1_POINT_2F jointPoints[JointType_Count];
                        HandState leftHandState = HandState_Unknown;
                        HandState rightHandState = HandState_Unknown;

                        pBody->get_HandLeftState(&leftHandState);
                        pBody->get_HandRightState(&rightHandState);

                        hr = pBody->GetJoints(_countof(joints), joints);
                        if (SUCCEEDED(hr))
                        {
                            // Convert to screen coordinates
                            for (int j = 0; j < _countof(joints); ++j)
                            {
                                jointPoints[j] = BodyToScreen(joints[j].Position);
                            }

                            // Draw a circle on the head and show the body index
                            m_pDrawColor->DrawHead(jointPoints[JointType_Head], i, nEngaged[i], lean);

                            // Fires when the hand tip enters a given region, like a button.
                            // Currently this triggers for every recognized body; really it should
                            // apply only to the first person recognized.
                            float xy[2] = { 0.0 };
                            if (!m_bSemaphore)
                            {
                                if (m_pSemaphore[0])
                                {
                                    delete m_pSemaphore[0];
                                    m_pSemaphore[0] = NULL;
                                }
                                if (m_pSemaphore[1])
                                {
                                    delete m_pSemaphore[1];
                                    m_pSemaphore[1] = NULL;
                                }

                                m_nButton = 1;
                                xy[0] = jointPoints[JointType_HandTipRight].x - start.x;
                                xy[1] = jointPoints[JointType_HandTipRight].y - start.y;
                                if (sqrt(xy[0] * xy[0] + xy[1] * xy[1]) < 100.0)
                                {
                                    if (nTrackBody == 10 || nTrackBody == nBodyIndex)
                                    {
                                        m_nButton = 0;
                                        nTrackBody = nBodyIndex;
                                    }
                                }
                            }
                            else
                            {
                                // Semaphore start: assign the semaphore judges to the first two distinct bodies
                                if (m_pSemaphore[0] == NULL)
                                {
                                    m_pSemaphore[0] = new Semaphore(&nBodyIndex);
                                }
                                else
                                {
                                    if (m_pSemaphore[1] == NULL && !m_pSemaphore[0]->ItsMe(&nBodyIndex))
                                    {
                                        m_pSemaphore[1] = new Semaphore(&nBodyIndex);
                                    }
                                }

                                // Counting, capturing data for the base pose, and the main semaphore processing.
                                // Judging the semaphore probably doesn't need the same frame rate as the image;
                                // acquire Body frames on a timer and judge from those instead.
                                if (m_pSemaphore[0])
                                {
                                    m_pSemaphore[0]->SetSignalType(&nBodyIndex, jointPoints, m_pDrawColor);
                                }
                                if (m_pSemaphore[1])
                                {
                                    m_pSemaphore[1]->SetSignalType(&nBodyIndex, jointPoints, m_pDrawColor);
                                }
                                //m_pSemaphore[0]->Practice(nTime, jointPoints, m_pDrawColor);

                                // Quit button handling
                                m_nButton = 2; // display for the base pose
                                xy[0] = jointPoints[JointType_HandTipLeft].x - quit.x;
                                xy[1] = jointPoints[JointType_HandTipLeft].y - quit.y;
                                if (sqrt(xy[0] * xy[0] + xy[1] * xy[1]) < 100.0)
                                {
                                    if (nTrackBody == 10 || nTrackBody == nBodyIndex)
                                    {
                                        m_nButton = 0;
                                        nTrackBody = nBodyIndex;
                                    }
                                }
                            }

                            m_pDrawColor->DrawBody(joints, jointPoints);
                            //m_pDrawColor->DrawHand(leftHandState, jointPoints[JointType_HandLeft]);
                            //m_pDrawColor->DrawHand(rightHandState, jointPoints[JointType_HandRight]);
                            Detect(pBody);
                            //break;
                        }
                    }
                }
            }

            if (!m_bSemaphore)
            {
                // This button handler sends a message to the window
                m_pDrawColor->DrawButton(start, m_nButton);
            }
            else
            {
                m_pDrawColor->DrawButton(quit, m_nButton);
            }

            // Show the challenge for the two-player match mode
            if (Question(nTime))
            {
                m_pDrawColor->DrawButton(quit, 0);
            }

            m_pDrawColor->EndDraw();
        }
    }
}