/// <summary>
/// Handle new depth, color, body index, and body data
/// <param name="nTime">timestamp of frame</param>
/// <param name="pDepthBuffer">pointer to depth frame data</param>
/// <param name="nDepthWidth">width (in pixels) of input depth image data</param>
/// <param name="nDepthHeight">height (in pixels) of input depth image data</param>
/// <param name="pColorBuffer">pointer to color frame data</param>
/// <param name="nColorWidth">width (in pixels) of input color image data</param>
/// <param name="nColorHeight">height (in pixels) of input color image data</param>
/// <param name="pBodyIndexBuffer">pointer to body index frame data</param>
/// <param name="nBodyIndexWidth">width (in pixels) of input body index data</param>
/// <param name="nBodyIndexHeight">height (in pixels) of input body index data</param>
/// <param name="nBodyCount">number of bodies in the body frame</param>
/// <param name="ppBodies">pointer to body data storage</param>
/// </summary>
void CCoordinateMappingBasics::ProcessFrame(INT64 nTime, 
                                            const UINT16* pDepthBuffer, int nDepthWidth, int nDepthHeight, 
                                            const RGBQUAD* pColorBuffer, int nColorWidth, int nColorHeight,
                                            const BYTE* pBodyIndexBuffer, int nBodyIndexWidth, int nBodyIndexHeight,
                                            int nBodyCount, IBody** ppBodies)
{
    if (m_hWnd)
    {
        if (!m_nStartTime)
        {
            m_nStartTime = nTime;
        }

        double fps = 0.0;

        LARGE_INTEGER qpcNow = {0};
        if (m_fFreq)
        {
            if (QueryPerformanceCounter(&qpcNow))
            {
                if (m_nLastCounter)
                {
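                    // m_fFreq is the QueryPerformanceCounter frequency in counts per second,
                    // so fps = frequency * frames / elapsed counts = frames per elapsed second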
                    m_nFramesSinceUpdate++;
                    fps = m_fFreq * m_nFramesSinceUpdate / double(qpcNow.QuadPart - m_nLastCounter);
                }
            }
        }

        WCHAR szStatusMessage[64];
        StringCchPrintf(szStatusMessage, _countof(szStatusMessage), L" FPS = %0.2f    Time = %I64d", fps, (nTime - m_nStartTime));

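        // SetStatusMessage returns true only when the message was actually updated;
        // that is when we restart the window over which the next FPS average is taken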
        if (SetStatusMessage(szStatusMessage, 1000, false))
        {
            m_nLastCounter = qpcNow.QuadPart;
            m_nFramesSinceUpdate = 0;
        }
    }

    // Make sure we've received valid data
    if (m_pCoordinateMapper && m_pColorCoordinates && m_pOutputRGBX && 
        pDepthBuffer && (nDepthWidth == cDepthWidth) && (nDepthHeight == cDepthHeight) && 
        pColorBuffer && (nColorWidth == cColorWidth) && (nColorHeight == cColorHeight) &&
        pBodyIndexBuffer && (nBodyIndexWidth == cDepthWidth) && (nBodyIndexHeight == cDepthHeight))
    {
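        // map the full depth frame into color space: one ColorSpacePoint per depth pixel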
        HRESULT hr = m_pCoordinateMapper->MapDepthFrameToColorSpace(nDepthWidth * nDepthHeight, (UINT16*)pDepthBuffer, nDepthWidth * nDepthHeight, m_pColorCoordinates);
        if (SUCCEEDED(hr))
        {
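            // RGBQUAD is laid out {blue, green, red, reserved}, so {0, 255, 0} is green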
            RGBQUAD c_green = {0, 255, 0}; 

            // loop over each pixel of the output image
            for (int depthIndex = 0; depthIndex < (nDepthWidth * nDepthHeight); ++depthIndex)
            {
                // default the copy source to the background pixel, or solid green when no background image is loaded
                const RGBQUAD* pSrc = (m_pBackgroundRGBX) ? (m_pBackgroundRGBX + depthIndex) : &c_green; 

                BYTE player = pBodyIndexBuffer[depthIndex];

                // if we're tracking a player for the current pixel, draw from the color camera
                if (player != 0xff)
                {
                    // retrieve the depth to color mapping for the current depth pixel
                    ColorSpacePoint colorPoint = m_pColorCoordinates[depthIndex];

                    // make sure the depth pixel maps to a valid point in color space
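                    // round to the nearest color pixel; depth pixels with no valid color
                    // mapping come back as -infinity and fail the bounds check below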
                    int colorX = (int)(floor(colorPoint.X + 0.5));
                    int colorY = (int)(floor(colorPoint.Y + 0.5));
                    if ((colorX >= 0) && (colorX < nColorWidth) && (colorY >= 0) && (colorY < nColorHeight))
                    {
                        // calculate index into color array
                        int colorIndex = colorX + (colorY * nColorWidth);
                        // set source for copy to the color pixel
                        pSrc = m_pColorRGBX + colorIndex;
                    }
                }

                // write output
                m_pOutputRGBX[depthIndex] = *pSrc;
            }

            // Draw the data with Direct2D
            m_pDrawCoordinateMapping->Draw(reinterpret_cast<BYTE*>(m_pOutputRGBX), cDepthWidth * cDepthHeight * sizeof(RGBQUAD));

            if (m_bSaveScreenshot)
            {
                WCHAR szScreenshotPath[MAX_PATH];

                // Retrieve the path to My Photos
                GetScreenshotFileName(szScreenshotPath, _countof(szScreenshotPath));

                // Write out the bitmap to disk
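                // RGBQUAD is 4 bytes, so sizeof(RGBQUAD) * 8 = 32 bits per pixel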
                HRESULT hr = SaveBitmapToFile(reinterpret_cast<BYTE*>(m_pOutputRGBX), nDepthWidth, nDepthHeight, sizeof(RGBQUAD) * 8, szScreenshotPath);

                WCHAR szStatusMessage[64 + MAX_PATH];
                if (SUCCEEDED(hr))
                {
                    // Set the status bar to show where the screenshot was saved
                    StringCchPrintf(szStatusMessage, _countof(szStatusMessage), L"Screenshot saved to %s", szScreenshotPath);
                }
                else
                {
                    StringCchPrintf(szStatusMessage, _countof(szStatusMessage), L"Failed to write screenshot to %s", szScreenshotPath);
                }

                SetStatusMessage(szStatusMessage, 5000, true);

                // toggle off so we don't save a screenshot again next frame
                m_bSaveScreenshot = false;
            }
        }
    }

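	// fixed screen-space position of the virtual button drawn below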
	D2D1_POINT_2F center;
	center.x = 400.0f;
	center.y = 100.0f;

	int width = 0;
	int height = 0;
	if (m_pCoordinateMapper)
	{
		RECT rct;
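		// measure the video view control so joints can be scaled to its client area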
		GetClientRect(GetDlgItem(m_hWnd, IDC_VIDEOVIEW), &rct);
		width = rct.right;
		height = rct.bottom;

		DWORD clipedge = 0;

		for (int i = 0; i < nBodyCount; ++i)
		{
			IBody* pBody = ppBodies[i];
			if (pBody)
			{
				BOOLEAN bTracked = false;
				HRESULT hr = pBody->get_IsTracked(&bTracked);

				// get_Engaged() seems to be usable; it presumably detects a person entering the field of view.
				//hr = pBody->get_Engaged(&nEngaged[i]);
				// The following does not appear to be usable yet:
				//hr = pBody->GetAppearanceDetectionResults((UINT)i, &nEngaged[i]);
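				// clipedge receives a FrameEdges bitmask indicating which edges of the
				// field of view clip this body (note: the HRESULT is ignored here)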
				pBody->get_ClippedEdges(&clipedge);

				if (SUCCEEDED(hr) && bTracked)
				{
					Joint joints[JointType_Count];
					D2D1_POINT_2F jointPoints[JointType_Count];
					HandState leftHandState = HandState_Unknown;
					HandState rightHandState = HandState_Unknown;

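					// query each hand's open/closed state for the hand overlays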
					pBody->get_HandLeftState(&leftHandState);
					pBody->get_HandRightState(&rightHandState);

					hr = pBody->GetJoints(_countof(joints), joints);
					if (SUCCEEDED(hr))
					{
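						// project every camera-space joint into overlay screen coordinates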
						for (int j = 0; j < _countof(joints); ++j)
						{
							jointPoints[j] = BodyToScreen(joints[j].Position, width, height);
						}

						m_pDrawCoordinateMapping->DrawBody(joints, jointPoints);

						// Draw a circle on the head and show the body index
						m_pDrawCoordinateMapping->DrawHead(jointPoints[JointType_Head], i, clipedge/*, nEngaged[i]*/);

						m_pDrawCoordinateMapping->DrawHand(leftHandState, jointPoints[JointType_HandLeft]);
						m_pDrawCoordinateMapping->DrawHand(rightHandState, jointPoints[JointType_HandRight]);

						// Trigger an action when the hand tip enters a given region, like pressing a button.
						// Currently this fires for every tracked body; it really should be restricted
						// to the first person recognized.
						float xy[2] = { 0.0f, 0.0f };
						xy[0] = jointPoints[JointType_HandTipRight].x - center.x;
						xy[1] = jointPoints[JointType_HandTipRight].y - center.y;

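						// circular hit test: the right hand tip within 50 pixels of the
						// button center counts as a press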
						m_nButton = 0;
						if (sqrtf(xy[0] * xy[0] + xy[1] * xy[1]) < 50.0f)
						{
							m_nButton = 1;
						}
						m_pDrawCoordinateMapping->DrawButton(center, m_nButton);
					}
				}
			}
		}
		m_pDrawCoordinateMapping->EndDraw();
	}
}