void ProcessBody(unsigned int nTime, unsigned int nBodyCount, IBody * ppBodies[6]){
		UCHAR bestBody = 0xff;
		float bestScore = 0;
		
		float trackingStateTable[3];
		trackingStateTable[TrackingState_Inferred] = 0.5;
		trackingStateTable[TrackingState_NotTracked] = 0;
		trackingStateTable[TrackingState_Tracked] = 1;

		for (unsigned int i = 0; i < nBodyCount; ++i) {
			IBody * body = ppBodies[i];
			
			BOOLEAN bodyTracked;
			HRESULT hr = body->get_IsTracked(&bodyTracked);

			if(!SUCCEEDED(hr) || !bodyTracked) continue;

			Joint joints[JointType_Count];
			hr = body->GetJoints(JointType_Count, joints);

			if(!SUCCEEDED(hr)) continue;

			float score=0;
			for(int j=0;j<JointType_Count;++j){
				score += trackingStateTable[joints[j].TrackingState];
			}

			if(score > bestScore){
				bestScore = score;
				bestBody = i;
			}
		}

		if (bestBody == 0xff){
			m_bSkeletonIsGood = false;
			m_nBodyIndex = 0xff;
			return;
		}
		HRESULT hr = ppBodies[bestBody]->GetJoints(JointType_Count, m_pJoints);

		if(!SUCCEEDED(hr)){
			std::cerr << "Error saving joints\n";
			m_bSkeletonIsGood = false;
			m_nBodyIndex = 0xff;
		}else{
			m_bSkeletonIsGood = true;
			m_nBodyIndex = bestBody;
		}
	}
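For context, a caller for this ProcessBody would normally pump an IBodyFrameReader, as the later examples do. A minimal sketch, assuming a SafeRelease helper and an already-opened m_pBodyFrameReader (neither is part of the original snippet):

void Update() {
	IBodyFrame* pBodyFrame = nullptr;
	if (SUCCEEDED(m_pBodyFrameReader->AcquireLatestFrame(&pBodyFrame))) {
		INT64 nTime = 0;
		pBodyFrame->get_RelativeTime(&nTime);
		IBody* ppBodies[BODY_COUNT] = { 0 };  // BODY_COUNT is 6 in Kinect.h
		if (SUCCEEDED(pBodyFrame->GetAndRefreshBodyData(_countof(ppBodies), ppBodies))) {
			ProcessBody((unsigned int)nTime, _countof(ppBodies), ppBodies);
		}
		for (int i = 0; i < BODY_COUNT; ++i) {
			SafeRelease(ppBodies[i]);  // assumed COM-release helper, as in the SDK samples
		}
		pBodyFrame->Release();
	}
}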
Example #2
IBody* IBody::clone()
{
	if (m_filename == "") return NULL;

	IBody* body = BodyFactory::createBody(m_filename, m_scale);
	if (body == NULL) return NULL; // the factory may fail; avoid dereferencing a null body

	body->m_filename = m_filename;
	body->m_alive = m_alive;
	body->m_world = m_world;
	body->m_scale = m_scale;

	b2Body* b2body = body->getBody();
	b2body->SetTransform(m_body->GetPosition(), m_body->GetAngle());

	return body;
}
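A possible call site for clone(), hedged since ownership is not documented in the snippet (the caller is assumed to delete the copy):

IBody* copy = original->clone(); // NULL when the original has no filename
if (copy != NULL) {
	// the copy shares the original's world, scale, and source file, and
	// starts at the same b2Body transform
}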
Example #3
void MyKinect2::ProcessBody(int nBodyCount, IBody** ppBodies)
{

    if (m_pCoordinateMapper)
    {
        for (int i = 0; i < nBodyCount; ++i)
        {
            IBody* pBody = ppBodies[i];
            if (pBody)
            {
                BOOLEAN bTracked = false;
                HRESULT hr = pBody->get_IsTracked(&bTracked);

                if (SUCCEEDED(hr) && bTracked)
                {
                    Joint joints[JointType_Count];

                    std::vector<Joint> askeleton;

                    hr = pBody->GetJoints(_countof(joints), joints);
                    if (SUCCEEDED(hr))
                    {
                        vec_skeletons.clear();
                        // The Kinect v2 provides 25 joints (JointType_Count), but only 20 are
                        // taken here so the rest of the code does not need to change: four of
                        // the extras are the hand tips and thumbs, which are not needed here,
                        // and the last is the shoulder center, skipped so the result matches
                        // the Kinect v1 exactly.
                        for (unsigned int j = 0; j < 20; j++)
                        {
                            askeleton.push_back(joints[j]);
                        }
                        vec_skeletons.push_back(askeleton);
                        if (is_recording && askeleton.size() > 0) {
                            vec_saved_skeletons.push_back(askeleton);
                            clock_t milliseconds = (clock() - this->startingClock) / (CLOCKS_PER_SEC / 1000);
                            vec_saved_times.push_back(milliseconds);
                        }
                        emit signal_changeskelpos();
                    }
                }
            }
        }
    }
}
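The 20-joint cut-off above relies on the layout of the Kinect v2 JointType enum; a compile-time check makes that assumption explicit (the enum values are from Kinect.h):

// Joints 0..19 are the Kinect v1-compatible set; SpineShoulder (20) and the
// four hand tip/thumb joints (21..24) all come after the cut-off.
static_assert(JointType_SpineShoulder == 20 && JointType_ThumbRight == 24,
              "the first 20 JointType values precede SpineShoulder and the hand joints");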
Example #4
/// Handle new body data
void CBodyBasics::ProcessBody(int nBodyCount, IBody** ppBodies)
{
	// records whether each operation succeeded
	HRESULT hr;

	// for each IBody
	for (int i = 0; i < nBodyCount; ++i)
	{
		IBody* pBody = ppBodies[i];
		if (pBody) // (original note: the difference between checking pBody here and bTracked below was unclear to the author)
		{
			BOOLEAN bTracked = false;
			hr = pBody->get_IsTracked(&bTracked);

			if (SUCCEEDED(hr) && bTracked)
			{
				Joint joints[JointType_Count]; // storage for the joints
				HandState leftHandState = HandState_Unknown; // left hand state
				HandState rightHandState = HandState_Unknown; // right hand state

				// get the left and right hand states
				pBody->get_HandLeftState(&leftHandState);
				pBody->get_HandRightState(&rightHandState);

				// joint positions in the depth coordinate system
				DepthSpacePoint *depthSpacePosition = new DepthSpacePoint[_countof(joints)];

				// get the joints
				hr = pBody->GetJoints(_countof(joints), joints);
				if (SUCCEEDED(hr))
				{
					clear = false;
					for (int j = 0; j < _countof(joints); ++j)
					{
						// map joint coordinates from camera space (-1..1) into depth space (512x424)
						m_pCoordinateMapper->MapCameraPointToDepthSpace(joints[j].Position, &depthSpacePosition[j]);
					}
					bodyXY[i][0] = depthSpacePosition[JointType_SpineMid].X;
					bodyXY[i][1] = depthSpacePosition[JointType_SpineMid].Y;
					angle[i]	 = atan(joints[JointType_SpineMid].Position.X / joints[JointType_SpineMid].Position.Z);
					if ( 0 > bodyXY[i][0] || bodyXY[i][0] > 512 || 0 > bodyXY[i][1] || bodyXY[i][1] > 424 )
					{
						bodyXY[i][0] = -1; 
						bodyXY[i][1] = -1; 
					}

					//------------------------hand state left-------------------------------
					DrawHandState(depthSpacePosition[JointType_HandLeft], leftHandState);
					DrawHandState(depthSpacePosition[JointType_HandRight], rightHandState);

					//---------------------------body-------------------------------
					DrawBone(joints, depthSpacePosition, JointType_Head, JointType_Neck);
					DrawBone(joints, depthSpacePosition, JointType_Neck, JointType_SpineShoulder);
					DrawBone(joints, depthSpacePosition, JointType_SpineShoulder, JointType_SpineMid);
					DrawBone(joints, depthSpacePosition, JointType_SpineMid, JointType_SpineBase);
					DrawBone(joints, depthSpacePosition, JointType_SpineShoulder, JointType_ShoulderRight);
					DrawBone(joints, depthSpacePosition, JointType_SpineShoulder, JointType_ShoulderLeft);
					DrawBone(joints, depthSpacePosition, JointType_SpineBase, JointType_HipRight);
					DrawBone(joints, depthSpacePosition, JointType_SpineBase, JointType_HipLeft);

					// -----------------------Right Arm ------------------------------------ 
					DrawBone(joints, depthSpacePosition, JointType_ShoulderRight, JointType_ElbowRight);
					DrawBone(joints, depthSpacePosition, JointType_ElbowRight, JointType_WristRight);
					DrawBone(joints, depthSpacePosition, JointType_WristRight, JointType_HandRight);
					DrawBone(joints, depthSpacePosition, JointType_HandRight, JointType_HandTipRight);
					DrawBone(joints, depthSpacePosition, JointType_WristRight, JointType_ThumbRight);

					//----------------------------------- Left Arm--------------------------
					DrawBone(joints, depthSpacePosition, JointType_ShoulderLeft, JointType_ElbowLeft);
					DrawBone(joints, depthSpacePosition, JointType_ElbowLeft, JointType_WristLeft);
					DrawBone(joints, depthSpacePosition, JointType_WristLeft, JointType_HandLeft);
					DrawBone(joints, depthSpacePosition, JointType_HandLeft, JointType_HandTipLeft);
					DrawBone(joints, depthSpacePosition, JointType_WristLeft, JointType_ThumbLeft);

					// ----------------------------------Right Leg--------------------------------
					DrawBone(joints, depthSpacePosition, JointType_HipRight, JointType_KneeRight);
					DrawBone(joints, depthSpacePosition, JointType_KneeRight, JointType_AnkleRight);
					DrawBone(joints, depthSpacePosition, JointType_AnkleRight, JointType_FootRight);

					// -----------------------------------Left Leg---------------------------------
					DrawBone(joints, depthSpacePosition, JointType_HipLeft, JointType_KneeLeft);
					DrawBone(joints, depthSpacePosition, JointType_KneeLeft, JointType_AnkleLeft);
					DrawBone(joints, depthSpacePosition, JointType_AnkleLeft, JointType_FootLeft);
				}
				delete[] depthSpacePosition;
			}
		}
	}
	cv::imshow("skeletonImg", skeletonImg);
	cv::waitKey(5);
}
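DrawBone and DrawHandState are not part of this snippet. A minimal sketch of what DrawBone might look like, assuming the skeletonImg OpenCV canvas shown above:

// Hypothetical DrawBone: draw a line between two joints on the OpenCV canvas,
// but only when both joints are at least inferred.
void CBodyBasics::DrawBone(const Joint* pJoints, const DepthSpacePoint* depthSpacePosition,
                           JointType joint0, JointType joint1)
{
	TrackingState state0 = pJoints[joint0].TrackingState;
	TrackingState state1 = pJoints[joint1].TrackingState;
	if (state0 == TrackingState_NotTracked || state1 == TrackingState_NotTracked)
		return; // nothing reliable to draw
	cv::Point p0((int)depthSpacePosition[joint0].X, (int)depthSpacePosition[joint0].Y);
	cv::Point p1((int)depthSpacePosition[joint1].X, (int)depthSpacePosition[joint1].Y);
	cv::line(skeletonImg, p0, p1, cv::Scalar(0, 255, 0), 2);
}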
Example #5
int main(int argc, char** argv) {
	// 1a. Get default Sensor
	std::cout << "Try to get default sensor" << std::endl;
	IKinectSensor* pSensor = nullptr;
	if (GetDefaultKinectSensor(&pSensor) != S_OK) {
		cerr << "Get Sensor failed" << std::endl;
		return -1;
	}

	// 1b. Open sensor
	std::cout << "Try to open sensor" << std::endl;
	if (pSensor->Open() != S_OK) {
		cerr << "Can't open sensor" << std::endl;
		return -1;
	}

	// 2. Color Related code
	IColorFrameReader* pColorFrameReader = nullptr;
	cv::Mat	mColorImg;
	UINT uBufferSize = 0;
	{
		// 2a. Get color frame source
		std::cout << "Try to get color source" << std::endl;
		IColorFrameSource* pFrameSource = nullptr;
		if (pSensor->get_ColorFrameSource(&pFrameSource) != S_OK) {
			cerr << "Can't get color frame source" << std::endl;
			return -1;
		}

		// 2b. Get frame description
		std::cout << "get color frame description" << std::endl;
		int		iWidth = 0;
		int		iHeight = 0;
		IFrameDescription* pFrameDescription = nullptr;
		if (pFrameSource->get_FrameDescription(&pFrameDescription) == S_OK)	{
			pFrameDescription->get_Width(&iWidth);
			pFrameDescription->get_Height(&iHeight);
		}
		pFrameDescription->Release();
		pFrameDescription = nullptr;

		// 2c. get frame reader
		std::cout << "Try to get color frame reader" << std::endl;
		if (pFrameSource->OpenReader(&pColorFrameReader) != S_OK) {
			cerr << "Can't get color frame reader" << std::endl;
			return -1;
		}

		// 2d. release Frame source
		std::cout << "Release frame source" << std::endl;
		pFrameSource->Release();
		pFrameSource = nullptr;

		// Prepare OpenCV data
		mColorImg = cv::Mat(iHeight, iWidth, CV_8UC4);
		uBufferSize = iHeight * iWidth * 4 * sizeof(BYTE);
	}

	// 3. Body related code
	IBodyFrameReader* pBodyFrameReader = nullptr;
	IBody** aBodyData = nullptr;
	INT32 iBodyCount = 0;
	{
		// 3a. Get frame source
		std::cout << "Try to get body source" << std::endl;
		IBodyFrameSource* pFrameSource = nullptr;
		if (pSensor->get_BodyFrameSource(&pFrameSource) != S_OK) {
			cerr << "Can't get body frame source" << std::endl;
			return -1;
		}

		// 3b. Get the number of body
		if (pFrameSource->get_BodyCount(&iBodyCount) != S_OK) {
			cerr << "Can't get body count" << std::endl;
			return -1;
		}
		std::cout << " > Can trace " << iBodyCount << " bodies" << std::endl;
		aBodyData = new IBody*[iBodyCount];
		for (int i = 0; i < iBodyCount; ++i)
			aBodyData[i] = nullptr;

		// 3c. get frame reader
		std::cout << "Try to get body frame reader" << std::endl;
		if (pFrameSource->OpenReader(&pBodyFrameReader) != S_OK) {
			cerr << "Can't get body frame reader" << std::endl;
			return -1;
		}

		// 3d. release Frame source
		std::cout << "Release frame source" << std::endl;
		pFrameSource->Release();
		pFrameSource = nullptr;
	}

	// 4. get CoordinateMapper
	ICoordinateMapper* pCoordinateMapper = nullptr;
	if (pSensor->get_CoordinateMapper(&pCoordinateMapper) != S_OK) {
		std::cout << "Can't get coordinate mapper" << std::endl;
		return -1;
	}

	// Enter main loop
	cv::namedWindow("Body Image");

	// Debug: output the joint velocities
	ofstream current_average_velocityTXT("current_average_velocity.txt");
	ofstream average_velocityTXT("average_velocity.txt");
	int frame_count = 0;
	int frame_count_for_standby = 0;
	float positionX0[25] = {0};
	float positionX1[25] = {0};
	float positionY0[25] = { 0 };
	float positionY1[25] = { 0 };
	float positionZ0[25] = { 0 };
	float positionZ1[25] = { 0 };

	float velocityX[25] = { 0 };
	float velocityY[25] = { 0 };
	float velocityZ[25] = { 0 };
	float current_velocity[25] = { 0 };
	float velocityee[8] = { 0 };
	float current_total_velocity = 0;
	float current_average_velocity = 0;
	float total_velocity = 0;
	float average_velocity = 0;

	while (true)
	{
		// 4a. Get last frame
		IColorFrame* pColorFrame = nullptr;
		if (pColorFrameReader->AcquireLatestFrame(&pColorFrame) == S_OK)	 {
			// 4c. Copy to OpenCV image
			if (pColorFrame->CopyConvertedFrameDataToArray(uBufferSize, mColorImg.data, ColorImageFormat_Bgra) != S_OK)	{
				cerr << "Data copy error" << endl;
			}
			// 4e. release frame
			pColorFrame->Release();
		}
		cv::Mat mImg = mColorImg.clone();
		// 4b. Get body data
		IBodyFrame* pBodyFrame = nullptr;	
		if (pBodyFrameReader->AcquireLatestFrame(&pBodyFrame) == S_OK) {
			// 4b. get Body data
			if (pBodyFrame->GetAndRefreshBodyData(iBodyCount, aBodyData) == S_OK) {
				// 4c. for each body
				for (int i = 0; i < iBodyCount; ++i) {
					IBody* pBody = aBodyData[i];
					// check if is tracked
					BOOLEAN bTracked = false;
					if ((pBody->get_IsTracked(&bTracked) == S_OK) && bTracked) {
						// get joint position
						Joint aJoints[JointType::JointType_Count];
						if (pBody->GetJoints(JointType::JointType_Count, aJoints) == S_OK) {
							DrawLine(mImg, aJoints[JointType_SpineBase], aJoints[JointType_SpineMid], pCoordinateMapper);
							DrawLine(mImg, aJoints[JointType_SpineMid], aJoints[JointType_SpineShoulder], pCoordinateMapper);
							DrawLine(mImg, aJoints[JointType_SpineShoulder], aJoints[JointType_Neck], pCoordinateMapper);
							DrawLine(mImg, aJoints[JointType_Neck], aJoints[JointType_Head], pCoordinateMapper);

							DrawLine(mImg, aJoints[JointType_SpineShoulder], aJoints[JointType_ShoulderLeft], pCoordinateMapper);
							DrawLine(mImg, aJoints[JointType_ShoulderLeft], aJoints[JointType_ElbowLeft], pCoordinateMapper);
							DrawLine(mImg, aJoints[JointType_ElbowLeft], aJoints[JointType_WristLeft], pCoordinateMapper);
							DrawLine(mImg, aJoints[JointType_WristLeft], aJoints[JointType_HandLeft], pCoordinateMapper);
							DrawLine(mImg, aJoints[JointType_HandLeft], aJoints[JointType_HandTipLeft], pCoordinateMapper);
							DrawLine(mImg, aJoints[JointType_HandLeft], aJoints[JointType_ThumbLeft], pCoordinateMapper);

							DrawLine(mImg, aJoints[JointType_SpineShoulder], aJoints[JointType_ShoulderRight], pCoordinateMapper);
							DrawLine(mImg, aJoints[JointType_ShoulderRight], aJoints[JointType_ElbowRight], pCoordinateMapper);
							DrawLine(mImg, aJoints[JointType_ElbowRight], aJoints[JointType_WristRight], pCoordinateMapper);
							DrawLine(mImg, aJoints[JointType_WristRight], aJoints[JointType_HandRight], pCoordinateMapper);
							DrawLine(mImg, aJoints[JointType_HandRight], aJoints[JointType_HandTipRight], pCoordinateMapper);
							DrawLine(mImg, aJoints[JointType_HandRight], aJoints[JointType_ThumbRight], pCoordinateMapper);

							DrawLine(mImg, aJoints[JointType_SpineBase], aJoints[JointType_HipLeft], pCoordinateMapper);
							DrawLine(mImg, aJoints[JointType_HipLeft], aJoints[JointType_KneeLeft], pCoordinateMapper);
							DrawLine(mImg, aJoints[JointType_KneeLeft], aJoints[JointType_AnkleLeft], pCoordinateMapper);
							DrawLine(mImg, aJoints[JointType_AnkleLeft], aJoints[JointType_FootLeft], pCoordinateMapper);

							DrawLine(mImg, aJoints[JointType_SpineBase], aJoints[JointType_HipRight], pCoordinateMapper);
							DrawLine(mImg, aJoints[JointType_HipRight], aJoints[JointType_KneeRight], pCoordinateMapper);
							DrawLine(mImg, aJoints[JointType_KneeRight], aJoints[JointType_AnkleRight], pCoordinateMapper);
							DrawLine(mImg, aJoints[JointType_AnkleRight], aJoints[JointType_FootRight], pCoordinateMapper);
						}
						// Debug: print the frame number
						std::cout << "frame " << ++frame_count << std::endl;
					
						// shift the 8-frame velocity history and accumulate it; the shift
						// must run from the end, otherwise slot 0 would simply be copied
						// into every other slot (velocityee[0] itself is refreshed further
						// down, after the per-joint update)
						for (int j = 7; j >= 1; j--) {
							velocityee[j] = velocityee[j-1];
							total_velocity += velocityee[j];
						}
						average_velocity = total_velocity / 8.0;
						
						if (average_velocity <= 0.0015) {	
							// determine if the person is still 
							if (frame_count_for_standby == 0) {
								PlaySound(TEXT("Alarm02.wav"), NULL, SND_FILENAME);
								std::cout << "Start capturing points!" << std::endl;
							}
							// count the frames whose velocity stays below the threshold
							frame_count_for_standby++;
							if (frame_count_for_standby >= 5) {
								frame_count_for_standby = 0;
							}
						} 
						// Debug:output the average velocity 
						average_velocityTXT << frame_count << " " << average_velocity << std::endl;
						total_velocity = 0;
						// Update the average velocity
						int available_joints = 0;
						for (int i = 0; i < 25; i++) {
							// X 
							positionX1[i] = positionX0[i];
							positionX0[i] = aJoints[i].Position.X;
							velocityX[i] = (positionX1[i] - positionX0[i]) * (positionX1[i] - positionX0[i]);
							// Y
							positionY1[i] = positionY0[i];
							positionY0[i] = aJoints[i].Position.Y;
							velocityY[i] = (positionY1[i] - positionY0[i]) * (positionY1[i] - positionY0[i]);
							// Z
							positionZ1[i] = positionZ0[i];
							positionZ0[i] = aJoints[i].Position.Z;
							velocityZ[i] = (positionZ1[i] - positionZ0[i]) * (positionZ1[i] - positionZ0[i]);
							current_velocity[i] = sqrtf(velocityX[i] + velocityY[i] + velocityZ[i]);
							// exclude velocity spikes (e.g. from lost or jumping joints)
							if (current_velocity[i] < 0.01) {
								current_total_velocity += current_velocity[i];
								available_joints++;
							}
						}
						// Only update when at least one joint passed the filter; otherwise keep the last frame's value
						if (available_joints != 0) {
							current_average_velocity = current_total_velocity / available_joints;
						}
						velocityee[0] = current_average_velocity;
						// Debug:output the current average velocity 
						current_average_velocityTXT << frame_count << " " << current_average_velocity << std::endl;
											
						current_total_velocity = 0;					
					}
				}
			} else {
				cerr << "Can't read body data" << endl;
			}
			// 4e. release frame
			pBodyFrame->Release();
		}
		// show image
		cv::imshow("Body Image",mImg);
		// 4c. check keyboard input
		if (cv::waitKey(30) == VK_ESCAPE) {
			break;
		}
	}
	// 3. delete body data array
	delete[] aBodyData;
	// 3. release frame reader
	std::cout << "Release body frame reader" << std::endl;
	pBodyFrameReader->Release();
	pBodyFrameReader = nullptr;
	// 2. release color frame reader
	std::cout << "Release color frame reader" << std::endl;
	pColorFrameReader->Release();
	pColorFrameReader = nullptr;
	// 1c. Close Sensor
	std::cout << "close sensor" << std::endl;
	pSensor->Close();
	// 1d. Release Sensor
	std::cout << "Release sensor" << std::endl;
	pSensor->Release();
	pSensor = nullptr;

	return 0;
}
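As an aside, the shift-based 8-frame history in the loop above can be replaced by a small ring buffer, which keeps the moving average O(1) per frame and avoids the shift-direction pitfall entirely. A self-contained sketch:

// 8-sample moving average over a ring buffer; push() returns the new average.
struct VelocityAverager {
	float samples[8] = { 0 };
	float sum = 0.0f;
	int   next = 0;
	float push(float v) {
		sum += v - samples[next]; // replace the oldest sample in the running sum
		samples[next] = v;
		next = (next + 1) % 8;
		return sum / 8.0f;
	}
};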
/// <summary>
/// Handle new body data
/// <param name="nTime">timestamp of frame</param>
/// <param name="nBodyCount">body data count</param>
/// <param name="ppBodies">body data in frame</param>
/// </summary>
void CBodyBasics::ProcessBody(INT64 nTime, int nBodyCount, IBody** ppBodies)
{
   
	//Declare a struct to collect data
	struct FRAME_RESUME actual_frame;
	
	if (m_hWnd)
    {
        HRESULT hr = EnsureDirect2DResources();

        if (SUCCEEDED(hr) && m_pRenderTarget && m_pCoordinateMapper)
        {
            m_pRenderTarget->BeginDraw();
            m_pRenderTarget->Clear();

            RECT rct;
            GetClientRect(GetDlgItem(m_hWnd, IDC_VIDEOVIEW), &rct);
            int width = rct.right;
            int height = rct.bottom;

            for (int i = 0; i < nBodyCount; ++i)
            {
                IBody* pBody = ppBodies[i];
                if (pBody)
                {
                    BOOLEAN bTracked = false;
                    hr = pBody->get_IsTracked(&bTracked);

					// isTracked gets 1 when the person is tracked, 0 otherwise
					actual_frame.person[i].isTracked = bTracked;

					//If the person is tracked
                    if (SUCCEEDED(hr) && bTracked)
                    {
                        Joint joints[JointType_Count]; 
                        D2D1_POINT_2F jointPoints[JointType_Count];
                        HandState leftHandState = HandState_Unknown;
                        HandState rightHandState = HandState_Unknown;

                        pBody->get_HandLeftState(&leftHandState);
                        pBody->get_HandRightState(&rightHandState);

                        hr = pBody->GetJoints(_countof(joints), joints);
                        if (SUCCEEDED(hr))
                        {
                            
							for (int j = 0; j < _countof(joints); ++j)
                            {
                                jointPoints[j] = BodyToScreen(joints[j].Position, width, height);	

								//If current joint is not tracked
								if (joints[j].TrackingState == TrackingState_NotTracked)
								{
									actual_frame.person[i].body.joint[j].isTracked = 0;
									actual_frame.person[i].body.joint[j].coordinate3D.X = 0;
									actual_frame.person[i].body.joint[j].coordinate3D.Y = 0;
									actual_frame.person[i].body.joint[j].coordinate3D.Z = 0;
									actual_frame.person[i].body.joint[j].coordinate2D.X = 0;
									actual_frame.person[i].body.joint[j].coordinate2D.Y = 0;
								}

								else
								{
									if (joints[j].TrackingState == TrackingState_Tracked)
									{
										actual_frame.person[i].body.joint[j].isTracked = 1;
									}
									else if (joints[j].TrackingState == TrackingState_Inferred)
									{
										actual_frame.person[i].body.joint[j].isTracked = 2;
									}
									else
									{
										actual_frame.person[i].body.joint[j].isTracked = 3;
									}

									actual_frame.person[i].body.joint[j].coordinate3D.X = joints[j].Position.X;
									actual_frame.person[i].body.joint[j].coordinate3D.Y = joints[j].Position.Y;
									actual_frame.person[i].body.joint[j].coordinate3D.Z = joints[j].Position.Z;
									actual_frame.person[i].body.joint[j].coordinate2D.X = jointPoints[j].x;
									actual_frame.person[i].body.joint[j].coordinate2D.Y = jointPoints[j].y;
								}

                            }						
											
							
                            DrawBody(joints, jointPoints);

                            DrawHand(leftHandState, jointPoints[JointType_HandLeft]);
                            DrawHand(rightHandState, jointPoints[JointType_HandRight]);
                        }											

						//Gesture values

						//Get body gesture
						//BodyGestureIdentification(actual_frame.person[i], &actual_frame.person[i].bodyGesture.type, &actual_frame.person[i].bodyGesture.parameter1);
						BodyPoseIdentification(actual_frame.person[i], &actual_frame.person[i].bodyPose.type);
						//Get hand gestures
						actual_frame.person[i].leftHandGesture.type = leftHandState;
						actual_frame.person[i].leftHandGesture.parameter1 = 0;
						actual_frame.person[i].rightHandGesture.type = rightHandState;
						actual_frame.person[i].rightHandGesture.parameter1 = 0;


                    }

					//If the person is not tracked, set all of its values to zero
					else
					{
						//All joints go to zero
						for (int j = 0; j < 25; j++)
						{
							actual_frame.person[i].body.joint[j].isTracked = 0;
							actual_frame.person[i].body.joint[j].coordinate3D.X = 0;
							actual_frame.person[i].body.joint[j].coordinate3D.Y = 0;
							actual_frame.person[i].body.joint[j].coordinate3D.Z = 0;
							actual_frame.person[i].body.joint[j].coordinate2D.X = 0;
							actual_frame.person[i].body.joint[j].coordinate2D.Y = 0;
						}

						//All gestures go to zero
						actual_frame.person[i].bodyPose.type = 0;
						actual_frame.person[i].bodyGesture.type = 0;
						actual_frame.person[i].bodyGesture.parameter1 = 0;
						actual_frame.person[i].leftHandGesture.type = 0;
						actual_frame.person[i].leftHandGesture.parameter1 = 0;
						actual_frame.person[i].rightHandGesture.type = 0;
						actual_frame.person[i].rightHandGesture.parameter1 = 0;

					}


                }

           }

		    hr = m_pRenderTarget->EndDraw();

			// Device lost, need to recreate the render target
            // We'll dispose it now and retry drawing
            if (D2DERR_RECREATE_TARGET == hr)
            {
                hr = S_OK;
                DiscardDirect2DResources();
            }
        }

        if (!m_nStartTime)
        {
            m_nStartTime = nTime;
        }

        double fps = 0.0;

        LARGE_INTEGER qpcNow = {0};
        if (m_fFreq)
        {
            if (QueryPerformanceCounter(&qpcNow))
            {
                if (m_nLastCounter)
                {
                    m_nFramesSinceUpdate++;
                    fps = m_fFreq * m_nFramesSinceUpdate / double(qpcNow.QuadPart - m_nLastCounter);
                }
            }
        }

        WCHAR szStatusMessage[64];
        StringCchPrintf(szStatusMessage, _countof(szStatusMessage), L" FPS = %0.2f    Time = %I64d", fps, (nTime - m_nStartTime));

        if (SetStatusMessage(szStatusMessage, 1000, false))
        {
            m_nLastCounter = qpcNow.QuadPart;
            m_nFramesSinceUpdate = 0;
        }
    }

	//Collected results are sent to a file
	write_a_frame_resume_in_file(actual_frame);

}
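The FRAME_RESUME type itself is not shown in this snippet. A hypothetical reconstruction, with names simply mirroring the member accesses above (person[], body.joint[], coordinate3D/coordinate2D, and the gesture fields):

struct COORD3D { float X, Y, Z; };
struct COORD2D { float X, Y; };
struct JOINT_RESUME { int isTracked; COORD3D coordinate3D; COORD2D coordinate2D; };
struct BODY_RESUME { JOINT_RESUME joint[JointType_Count]; };
struct GESTURE_RESUME { int type; float parameter1; };
struct PERSON_RESUME {
	BOOLEAN isTracked;
	BODY_RESUME body;
	GESTURE_RESUME bodyPose, bodyGesture, leftHandGesture, rightHandGesture;
};
struct FRAME_RESUME { PERSON_RESUME person[BODY_COUNT]; }; // BODY_COUNT is 6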
/// <summary>
/// Handle new body data
/// <param name="nTime">timestamp of frame</param>
/// <param name="nBodyCount">body data count</param>
/// <param name="ppBodies">body data in frame</param>
/// </summary>
void CBodyBasics::ProcessBody(INT64 nTime, int nBodyCount, IBody** ppBodies)
{
    if (m_hWnd)
    {
        HRESULT hr = EnsureDirect2DResources();

        if (SUCCEEDED(hr) && m_pRenderTarget && m_pCoordinateMapper)
        {
            m_pRenderTarget->BeginDraw();
            m_pRenderTarget->Clear();

            RECT rct;
            GetClientRect(GetDlgItem(m_hWnd, IDC_VIDEOVIEW), &rct);
            int width = rct.right;
            int height = rct.bottom;

            for (int i = 0; i < nBodyCount; ++i)
            {
                IBody* pBody = ppBodies[i];
                if (pBody)
                {
                    BOOLEAN bTracked = false;
                    hr = pBody->get_IsTracked(&bTracked);

                    if (SUCCEEDED(hr) && bTracked)
                    {
                        Joint joints[JointType_Count]; 
                        D2D1_POINT_2F jointPoints[JointType_Count];
                        HandState leftHandState = HandState_Unknown;
                        HandState rightHandState = HandState_Unknown;

                        pBody->get_HandLeftState(&leftHandState);
                        pBody->get_HandRightState(&rightHandState);

                        hr = pBody->GetJoints(_countof(joints), joints);
                        if (SUCCEEDED(hr))
                        {
                            for (int j = 0; j < _countof(joints); ++j)
                            {
                                jointPoints[j] = BodyToScreen(joints[j].Position, width, height);
                            }

                            DrawBody(joints, jointPoints);

                            DrawHand(leftHandState, jointPoints[JointType_HandLeft]);
                            DrawHand(rightHandState, jointPoints[JointType_HandRight]);
                        }
						
                    }
                }
            }

            hr = m_pRenderTarget->EndDraw();

            // Device lost, need to recreate the render target
            // We'll dispose it now and retry drawing
            if (D2DERR_RECREATE_TARGET == hr)
            {
                hr = S_OK;
                DiscardDirect2DResources();
            }
        }
		FILE *fp;
		if ((fp = fopen("C:\\Users\\tuchiyama\\Desktop\\SpeechBasics-D2D\\speech.txt", "r")) != NULL) {
			if ((fp = fopen("body.txt", "r")) != NULL) {
				std::ifstream ifsa("C:\\Users\\tuchiyama\\Desktop\\SpeechBasics-D2D\\speech.txt");
				std::string linea;
				int si = 0;
				while (std::getline(ifsa, linea)) {
					si++;
				}
				std::ifstream ifsb("body.txt");
				std::string lineb;
				int bi = 0;
				while (std::getline(ifsb, lineb)) {
					bi++;
				}
				if (si > bi) {
					isWrite = true;
					std::ofstream ofs("body.txt", std::ios::app);
					for (int j = 0; j < si - bi; j++) {
						ofs << "start" << std::endl;
					}
				}
			}
		}
		if ((fp = fopen("C:\\Users\\tuchiyama\\Desktop\\SpeechBasics-D2D\\delete.txt", "r")) != NULL) {
			if ((fp = fopen("delete.txt", "r")) != NULL) {
				std::ifstream ifsa("C:\\Users\\tuchiyama\\Desktop\\SpeechBasics-D2D\\delete.txt");
				std::string linea;
				int si = 0;
				while (std::getline(ifsa, linea)) {
					si++;
				}
				std::ifstream ifsb("delete.txt");
				std::string lineb;
				int bi = 0;
				while (std::getline(ifsb, lineb)) {
					bi++;
				}
				if (si > bi) {
					system("ruby C:\\Users\\tuchiyama\\Documents\\odorimming\\make_html.rb undo");
					std::ofstream ofs("delete.txt", std::ios::app);
					for (int j = 0; j < si - bi; j++) {
						ofs << "delete" << std::endl;
					}
				}
			}
		}

        if (!m_nStartTime)
        {
            m_nStartTime = nTime;
        }

        double fps = 0.0;

        LARGE_INTEGER qpcNow = {0};
        if (m_fFreq)
        {
            if (QueryPerformanceCounter(&qpcNow))
            {
                if (m_nLastCounter)
                {
                    m_nFramesSinceUpdate+=1;
                    fps = m_fFreq * m_nFramesSinceUpdate / double(qpcNow.QuadPart - m_nLastCounter);
                }
            }
        }

        WCHAR szStatusMessage[64];
        StringCchPrintf(szStatusMessage, _countof(szStatusMessage), L" FPS = %0.2f    Time = %I64d", fps, (nTime - m_nStartTime));

        if (SetStatusMessage(szStatusMessage, 1000, false))
        {
            m_nLastCounter = qpcNow.QuadPart ;
            m_nFramesSinceUpdate = 0;
        }
    }
}
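Both file-watching blocks above repeat the same count-the-lines idiom; a small helper (not in the original) would keep it in one place:

// Count the lines in a text file; returns 0 when the file cannot be opened.
static int countLines(const std::string& path) {
	std::ifstream ifs(path);
	std::string line;
	int n = 0;
	while (std::getline(ifs, line)) ++n;
	return n;
}

With it, each block reduces to comparing countLines() for the source and mirror files and appending the difference.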
	void BreathingClass::ProcessBody(INT64 nTime, int nBodyCount, IBody** ppBodies)
	{
		double testVal = 0;

		if (m_hWnd)
		{
			HRESULT hr = EnsureDirect2DResources();

			if (SUCCEEDED(hr) && m_pRenderTarget && m_pCoordinateMapper)
			{
				m_pRenderTarget->BeginDraw();
				m_pRenderTarget->Clear();

				RECT rct;
				GetClientRect(GetDlgItem(m_hWnd, IDC_VIDEOVIEW), &rct);
				int width = rct.right;
				int height = rct.bottom;



				for (int i = 0; i < nBodyCount; ++i)
				{
					IBody* pBody = ppBodies[i];
					if (pBody)
					{
						BOOLEAN bTracked = false;
						hr = pBody->get_IsTracked(&bTracked);

						if (SUCCEEDED(hr) && bTracked)
						{
							Joint joints[JointType_Count];
							D2D1_POINT_2F jointPoints[JointType_Count];
							HandState leftHandState = HandState_Unknown;
							HandState rightHandState = HandState_Unknown;


							pBody->get_HandLeftState(&leftHandState);
							pBody->get_HandRightState(&rightHandState);



							hr = pBody->GetJoints(_countof(joints), joints);
							if (SUCCEEDED(hr))
							{
								for (int j = 0; j < _countof(joints); ++j)
								{
									jointPoints[j] = BodyToScreen(joints[j].Position, width, height);
								}

								//DrawHand(leftHandState, jointPoints[JointType_ShoulderLeft]);

								float shoulderLeft = jointPoints[JointType_ShoulderLeft].y;
								float shoulderRight = jointPoints[JointType_ShoulderRight].y;
								float shoulderSpine = jointPoints[JointType_SpineShoulder].y;

								// average shoulder height in screen space, scaled by 100
								testVal = (shoulderLeft + shoulderRight) / 2.0f * 100;

								BreathUpdate2(testVal);
							}
						}
					}
				}

				hr = m_pRenderTarget->EndDraw();

				// Device lost, need to recreate the render target
				// We'll dispose it now and retry drawing
				if (D2DERR_RECREATE_TARGET == hr)
				{
					hr = S_OK;
					DiscardDirect2DResources();
				}
			}

			if (!m_nStartTime)
			{
				m_nStartTime = nTime;
			}


			// the fps readout is repurposed here as a breathing indicator
			// (1 = breathing in, 0 = breathing out)
			double fps = GetBreathingIn() ? 1.0 : 0.0;

			LARGE_INTEGER qpcNow = { 0 };
			if (m_fFreq)
			{
				if (QueryPerformanceCounter(&qpcNow))
				{
					if (m_nLastCounter)
					{
						m_nFramesSinceUpdate++;
						// fps = m_fFreq * m_nFramesSinceUpdate / double(qpcNow.QuadPart - m_nLastCounter);
					}
				}
			}

			WCHAR szStatusMessage[64];
			//StringCchPrintf(szStatusMessage, _countof(szStatusMessage), L" FPS = %0.2f    Time = %I64d", fps, (nTime - m_nStartTime));

		
		}
	}
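Several examples above call BodyToScreen without showing it. In the stock CBodyBasics sample it maps a camera-space point into the 512x424 depth image and scales the result to the window, roughly as follows (cDepthWidth and cDepthHeight are the depth-frame dimensions):

D2D1_POINT_2F CBodyBasics::BodyToScreen(const CameraSpacePoint& bodyPoint, int width, int height)
{
	// project the 3D camera-space point into depth-image pixels, then scale
	// that pixel position to the current window size
	DepthSpacePoint depthPoint = { 0 };
	m_pCoordinateMapper->MapCameraPointToDepthSpace(bodyPoint, &depthPoint);
	float screenPointX = static_cast<float>(depthPoint.X * width) / cDepthWidth;
	float screenPointY = static_cast<float>(depthPoint.Y * height) / cDepthHeight;
	return D2D1::Point2F(screenPointX, screenPointY);
}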
Example #9
void Kin2::getFaces(std::vector<k2::FaceData>& facesData)
{
    if (!(m_flags & k2::FACE))
    {
        mexPrintf("ERROR: NO FACE FUNCTIONALITY SELECTED!\n");
        return;
    }
        
	HRESULT hr;
	facesData.clear();

	// iterate through each face reader
	for (int iFace = 0; iFace < BODY_COUNT; ++iFace)
	{
		// retrieve the latest face frame from this reader
		IFaceFrame* pFaceFrame = nullptr;
		hr = m_pFaceFrameReaders[iFace]->AcquireLatestFrame(&pFaceFrame);

		BOOLEAN bFaceTracked = false;
		if (SUCCEEDED(hr) && nullptr != pFaceFrame)
		{
			// check if a valid face is tracked in this face frame
			hr = pFaceFrame->get_IsTrackingIdValid(&bFaceTracked);
		}

		if (SUCCEEDED(hr))
		{
			// If face tracked, save its data on the facesData structure array
			if (bFaceTracked)
			{
				IFaceFrameResult* pFaceFrameResult = nullptr;
				hr = pFaceFrame->get_FaceFrameResult(&pFaceFrameResult);

				k2::FaceData faceData;

				// need to verify if pFaceFrameResult contains data before trying to access it
				if (SUCCEEDED(hr) && pFaceFrameResult != nullptr)
				{
					hr = pFaceFrameResult->get_FaceBoundingBoxInColorSpace(&faceData.faceBox);

					if (SUCCEEDED(hr))
					{
						hr = pFaceFrameResult->GetFacePointsInColorSpace(FacePointType::FacePointType_Count, faceData.facePoints);
					}

					if (SUCCEEDED(hr))
					{
						hr = pFaceFrameResult->get_FaceRotationQuaternion(&faceData.faceRotation);
					}

					if (SUCCEEDED(hr))
					{
						hr = pFaceFrameResult->GetFaceProperties(FaceProperty::FaceProperty_Count, faceData.faceProperties);
					}

					facesData.push_back(faceData);
				}

				SafeRelease(pFaceFrameResult);
			}
			else
			{
				// face tracking is not valid - attempt to fix the issue
				// a valid body is required to perform this step
				if (m_bHaveBodyData)
				{
					// check if the corresponding body is tracked 
					// if this is true then update the face frame source to track this body
					IBody* pBody = m_ppBodies[iFace];
					if (pBody != nullptr)
					{
						BOOLEAN bTracked = false;
						hr = pBody->get_IsTracked(&bTracked);

						UINT64 bodyTId;
						if (SUCCEEDED(hr) && bTracked)
						{
							// get the tracking ID of this body
							hr = pBody->get_TrackingId(&bodyTId);
							if (SUCCEEDED(hr))
							{
								// update the face frame source with the tracking ID
								m_pFaceFrameSources[iFace]->put_TrackingId(bodyTId);
							}
						}
					}
				}
			}
		}

		SafeRelease(pFaceFrame);
	}
}
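For reference, the m_pFaceFrameSources / m_pFaceFrameReaders arrays iterated above are typically created once per trackable body, along these lines (a sketch assuming Kinect.Face.h; the feature-flag list is trimmed and m_pKinectSensor is an assumed member):

DWORD features = FaceFrameFeatures_BoundingBoxInColorSpace
               | FaceFrameFeatures_PointsInColorSpace
               | FaceFrameFeatures_RotationOrientation;
for (int i = 0; i < BODY_COUNT; ++i)
{
	// one face source/reader pair per trackable body, initially unbound (tracking id 0)
	hr = CreateFaceFrameSource(m_pKinectSensor, 0, features, &m_pFaceFrameSources[i]);
	if (SUCCEEDED(hr))
	{
		hr = m_pFaceFrameSources[i]->OpenReader(&m_pFaceFrameReaders[i]);
	}
}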
/// <summary>
/// Handle new depth and color data
/// <param name="nTime">timestamp of frame</param>
/// <param name="pDepthBuffer">pointer to depth frame data</param>
/// <param name="nDepthWidth">width (in pixels) of input depth image data</param>
/// <param name="nDepthHeight">height (in pixels) of input depth image data</param>
/// <param name="pColorBuffer">pointer to color frame data</param>
/// <param name="nColorWidth">width (in pixels) of input color image data</param>
/// <param name="nColorHeight">height (in pixels) of input color image data</param>
/// <param name="pBodyIndexBuffer">pointer to body index frame data</param>
/// <param name="nBodyIndexWidth">width (in pixels) of input body index data</param>
/// <param name="nBodyIndexHeight">height (in pixels) of input body index data</param>
/// </summary>
void CCoordinateMappingBasics::ProcessFrame(INT64 nTime, 
                                            const UINT16* pDepthBuffer, int nDepthWidth, int nDepthHeight, 
                                            const RGBQUAD* pColorBuffer, int nColorWidth, int nColorHeight,
                                            const BYTE* pBodyIndexBuffer, int nBodyIndexWidth, int nBodyIndexHeight,
											int nBodyCount, IBody** ppBodies)
{
    if (m_hWnd)
    {
        if (!m_nStartTime)
        {
            m_nStartTime = nTime;
        }

        double fps = 0.0;

        LARGE_INTEGER qpcNow = {0};
        if (m_fFreq)
        {
            if (QueryPerformanceCounter(&qpcNow))
            {
                if (m_nLastCounter)
                {
                    m_nFramesSinceUpdate++;
                    fps = m_fFreq * m_nFramesSinceUpdate / double(qpcNow.QuadPart - m_nLastCounter);
                }
            }
        }

        WCHAR szStatusMessage[64];
        StringCchPrintf(szStatusMessage, _countof(szStatusMessage), L" FPS = %0.2f    Time = %I64d", fps, (nTime - m_nStartTime));

        if (SetStatusMessage(szStatusMessage, 1000, false))
        {
            m_nLastCounter = qpcNow.QuadPart;
            m_nFramesSinceUpdate = 0;
        }
    }

    // Make sure we've received valid data
    if (m_pCoordinateMapper && m_pColorCoordinates && m_pOutputRGBX && 
        pDepthBuffer && (nDepthWidth == cDepthWidth) && (nDepthHeight == cDepthHeight) && 
        pColorBuffer && (nColorWidth == cColorWidth) && (nColorHeight == cColorHeight) &&
        pBodyIndexBuffer && (nBodyIndexWidth == cDepthWidth) && (nBodyIndexHeight == cDepthHeight))
    {
        HRESULT hr = m_pCoordinateMapper->MapDepthFrameToColorSpace(nDepthWidth * nDepthHeight, (UINT16*)pDepthBuffer,nDepthWidth * nDepthHeight, m_pColorCoordinates);
        if (SUCCEEDED(hr))
        {
            RGBQUAD c_green = {0, 255, 0}; 

            // loop over pixel of the output
            for (int depthIndex = 0; depthIndex < (nDepthWidth * nDepthHeight); ++depthIndex)
            {
                // default setting source to copy from the background pixel
                const RGBQUAD* pSrc = (m_pBackgroundRGBX) ? (m_pBackgroundRGBX + depthIndex) : &c_green; 

                BYTE player = pBodyIndexBuffer[depthIndex];

                // if we're tracking a player for the current pixel, draw from the color camera
                if (player != 0xff)
                {
                    // retrieve the depth to color mapping for the current depth pixel
                    ColorSpacePoint colorPoint = m_pColorCoordinates[depthIndex];

                    // make sure the depth pixel maps to a valid point in color space
                    int colorX = (int)(floor(colorPoint.X + 0.5));
                    int colorY = (int)(floor(colorPoint.Y + 0.5));
                    if ((colorX >= 0) && (colorX < nColorWidth) && (colorY >= 0) && (colorY < nColorHeight))
                    {
                        // calculate index into color array
                        int colorIndex = colorX + (colorY * nColorWidth);
                        // set source for copy to the color pixel
                        pSrc = m_pColorRGBX + colorIndex;
                    }
                }

                // write output
                m_pOutputRGBX[depthIndex] = *pSrc;
            }

            // Draw the data with Direct2D
            m_pDrawCoordinateMapping->Draw(reinterpret_cast<BYTE*>(m_pOutputRGBX), cDepthWidth * cDepthHeight * sizeof(RGBQUAD));

            if (m_bSaveScreenshot)
            {
                WCHAR szScreenshotPath[MAX_PATH];

                // Retrieve the path to My Photos
                GetScreenshotFileName(szScreenshotPath, _countof(szScreenshotPath));

                // Write out the bitmap to disk
                HRESULT hr = SaveBitmapToFile(reinterpret_cast<BYTE*>(m_pOutputRGBX), nDepthWidth, nDepthHeight, sizeof(RGBQUAD) * 8, szScreenshotPath);

                WCHAR szStatusMessage[64 + MAX_PATH];
                if (SUCCEEDED(hr))
                {
                    // Set the status bar to show where the screenshot was saved
                    StringCchPrintf(szStatusMessage, _countof(szStatusMessage), L"Screenshot saved to %s", szScreenshotPath);
                }
                else
                {
                    StringCchPrintf(szStatusMessage, _countof(szStatusMessage), L"Failed to write screenshot to %s", szScreenshotPath);
                }

                SetStatusMessage(szStatusMessage, 5000, true);

                // toggle off so we don't save a screenshot again next frame
                m_bSaveScreenshot = false;
            }
        }
    }

	D2D1_POINT_2F center;
	center.x = 400.0;
	center.y = 100.0;

	int width = 0;
	int height = 0;
	if (m_pCoordinateMapper)
	{

		RECT rct;
		GetClientRect(GetDlgItem(m_hWnd, IDC_VIDEOVIEW), &rct);
		width = rct.right;
		height = rct.bottom;

		DWORD clipedge = 0;

		for (int i = 0; i < nBodyCount; ++i)
		{
			IBody* pBody = ppBodies[i];
			if (pBody)
			{
				BOOLEAN bTracked = false;
				HRESULT hr = pBody->get_IsTracked(&bTracked);

				// get_Engaged() appears to be usable; it presumably detects a person
				// entering the field of view.
				//hr = pBody->get_Engaged(&nEngaged[i]);
				// The following does not seem to be usable yet:
				//hr = pBody->GetAppearanceDetectionResults((UINT)i, &nEngaged[i]);
				pBody->get_ClippedEdges(&clipedge);

				if (SUCCEEDED(hr) && bTracked)
				{
					Joint joints[JointType_Count];
					D2D1_POINT_2F jointPoints[JointType_Count];
					HandState leftHandState = HandState_Unknown;
					HandState rightHandState = HandState_Unknown;

					pBody->get_HandLeftState(&leftHandState);
					pBody->get_HandRightState(&rightHandState);

					hr = pBody->GetJoints(_countof(joints), joints);
					if (SUCCEEDED(hr))
					{
						for (int j = 0; j < _countof(joints); ++j)
						{
							jointPoints[j] = BodyToScreen(joints[j].Position, width, height);
						}

						m_pDrawCoordinateMapping->DrawBody(joints, jointPoints);

						// draw a circle on the head and label it with the body index
						m_pDrawCoordinateMapping->DrawHead(jointPoints[JointType_Head], i, clipedge/*, nEngaged[i]*/);

						m_pDrawCoordinateMapping->DrawHand(leftHandState, jointPoints[JointType_HandLeft]);
						m_pDrawCoordinateMapping->DrawHand(rightHandState, jointPoints[JointType_HandRight]);

						// Trigger when the hand tip enters a given region, like a button.
						// Currently this fires for every tracked person; ideally only the
						// first person recognized should trigger it.
						float xy[2] = { 0.0 };
						xy[0] = jointPoints[JointType_HandTipRight].x - center.x;
						xy[1] = jointPoints[JointType_HandTipRight].y - center.y;

						m_nButton = 0;
						if (sqrt(xy[0] * xy[0] + xy[1] * xy[1]) < 50.0)
						{
							m_nButton = 1;
						}
						m_pDrawCoordinateMapping->DrawButton(center, m_nButton);
					}
				}
			}
		}
		m_pDrawCoordinateMapping->EndDraw();
	}
}
Example #11
void KinectPlugin::ProcessBody(INT64 time, int bodyCount, IBody** bodies) {
    bool foundOneBody = false;
    if (_coordinateMapper) {
        for (int i = 0; i < bodyCount; ++i) {
            if (foundOneBody) {
                break;
            }
            IBody* body = bodies[i];
            if (body) {
                BOOLEAN tracked = false;
                HRESULT hr = body->get_IsTracked(&tracked);

                if (SUCCEEDED(hr) && tracked) {
                    foundOneBody = true;

                    if (_joints.size() != JointType_Count) {
                        _joints.resize(JointType_Count, { { 0.0f, 0.0f, 0.0f }, { 0.0f, 0.0f, 0.0f, 0.0f } });
                    }

                    Joint joints[JointType_Count];
                    JointOrientation jointOrientations[JointType_Count];
                    HandState leftHandState = HandState_Unknown;
                    HandState rightHandState = HandState_Unknown;

                    body->get_HandLeftState(&leftHandState);
                    body->get_HandRightState(&rightHandState);

                    hr = body->GetJoints(_countof(joints), joints);
                    if (SUCCEEDED(hr)) {
                        // only fetch orientations when the joints were read successfully
                        hr = body->GetJointOrientations(_countof(jointOrientations), jointOrientations);
                    }

                    if (SUCCEEDED(hr)) {
                        auto jointCount = _countof(joints);
                        //qDebug() << __FUNCTION__ << "nBodyCount:" << nBodyCount << "body:" << i << "jointCount:" << jointCount;
                        for (size_t j = 0; j < jointCount; ++j) {
                            //QString jointName = kinectJointNames[joints[j].JointType];

                            glm::vec3 jointPosition { joints[j].Position.X,
                                                      joints[j].Position.Y,
                                                      joints[j].Position.Z };

                            // Kinect Documentation is unclear on what these orientations are, are they absolute? 
                            // or are the relative to the parent bones. It appears as if it has changed between the
                            // older 1.x SDK and the 2.0 sdk
                            //
                            // https://social.msdn.microsoft.com/Forums/en-US/31c9aff6-7dab-433d-9af9-59942dfd3d69/kinect-v20-preview-sdk-jointorientation-vs-boneorientation?forum=kinectv2sdk
                            // seems to suggest these are absolute...
                            //    "These quaternions are absolute, so you can take a mesh in local space, transform it by the quaternion, 
                            //    and it will match the exact orientation of the bone.  If you want relative orientation quaternion, you 
                            //    can multiply the absolute quaternion by the inverse of the parent joint's quaternion."
                            //
                            //  - Bone direction(Y green) - always matches the skeleton.
                            //  - Normal(Z blue) - joint roll, perpendicular to the bone
                            //  - Binormal(X orange) - perpendicular to the bone and normal

                            // note: glm::quat's constructor takes w first, so the SDK's
                            // x/y/z/w components are reordered here accordingly
                            glm::quat jointOrientation { jointOrientations[j].Orientation.w,
                                                         jointOrientations[j].Orientation.x,
                                                         jointOrientations[j].Orientation.y,
                                                         jointOrientations[j].Orientation.z };

                            // filling in the _joints data...
                            if (joints[j].TrackingState != TrackingState_NotTracked) {
                                _joints[j].position = jointPosition;
                                //_joints[j].orientation = jointOrientation;
                            }
                        }
                    }
                }
            }
        }
    }
}
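Following the MSDN answer quoted in the comments above, a parent-relative orientation can be recovered from two absolute ones. A glm-based sketch (the joint-hierarchy lookup that supplies the parent quaternion is not part of this snippet):

#include <glm/gtc/quaternion.hpp>

// relative = inverse(parentAbsolute) * childAbsolute, per the quoted answer
glm::quat absoluteToRelative(const glm::quat& parentAbsolute, const glm::quat& childAbsolute)
{
	return glm::inverse(parentAbsolute) * childAbsolute;
}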
void CBodyBasics::TransmitBody(INT64 nTime, int nBodyCount, IBody** ppBodies)
{
	// UDP
	Message msg;
	PacketWriter pw;
	bool ok;
	HRESULT hr;

	
	for (size_t cptr = 0; cptr < clients.size(); cptr++)
	{
		if (clients[cptr].active == 1) {
			//printFucker("sending to client " + clients[cptr].address + ": " + std::to_string(nBodyCount) + " bodies!\n");

			// SEND FRAME START OVER UDP
			msg.init("/beginFrame");
			msg.pushInt32(nBodyCount);
			pw.init();
			pw.startBundle().startBundle().addMessage(msg).endBundle().endBundle();
			ok = clients[cptr].socket.sendPacket(pw.packetData(), pw.packetSize());


			for (int i = 0; i < nBodyCount; ++i)
			{
				IBody* pBody = ppBodies[i];
				if (pBody)
				{
					BOOLEAN bTracked = false;
					hr = pBody->get_IsTracked(&bTracked);

					if (SUCCEEDED(hr) && bTracked)
					{
						Joint joints[JointType_Count];
						D2D1_POINT_2F jointPoints[JointType_Count];
						HandState leftHandState = HandState_Unknown;
						HandState rightHandState = HandState_Unknown;

						pBody->get_HandLeftState(&leftHandState);
						pBody->get_HandRightState(&rightHandState);

						hr = pBody->GetJoints(_countof(joints), joints);
						if (SUCCEEDED(hr))
						{
							msg.init("/beginBody");
							msg.pushInt32(i);
							pw.init();
							pw.startBundle().startBundle().addMessage(msg).endBundle().endBundle();
							ok = clients[cptr].socket.sendPacket(pw.packetData(), pw.packetSize());

							for (int j = 0; j < _countof(joints); ++j)
							{
								// /kinect body joint x y z
								msg.init("/bodyJoint");
								msg.pushInt32(i);
								msg.pushInt32(j);
								// body relative - joints[1] is spineMid which maps to Torso in OpenNI
								msg.pushFloat(joints[j].Position.X - joints[1].Position.X);
								msg.pushFloat(joints[j].Position.Y - joints[1].Position.Y);
								msg.pushFloat(joints[j].Position.Z - joints[1].Position.Z);
								// send message
								pw.init();
								pw.startBundle().startBundle().addMessage(msg).endBundle().endBundle();
								ok = clients[cptr].socket.sendPacket(pw.packetData(), pw.packetSize());

							}

							msg.init("/endBody");
							msg.pushInt32(i);
							pw.init();
							pw.startBundle().startBundle().addMessage(msg).endBundle().endBundle();
							ok = clients[cptr].socket.sendPacket(pw.packetData(), pw.packetSize());
						}


					}
				}
			}


			// SEND FRAME END OVER UDP
			msg.init("/endFrame");
			pw.init();
			pw.startBundle().startBundle().addMessage(msg).endBundle().endBundle();
			ok = clients[cptr].socket.sendPacket(pw.packetData(), pw.packetSize());


		}
	}

}
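The Message / PacketWriter calls above match the oscpkt OSC library's API. On that assumption, a receiver could decode the /bodyJoint stream roughly like this (packetData and packetSize come from the receiving socket):

oscpkt::PacketReader pr(packetData, packetSize);
oscpkt::Message* msg;
while (pr.isOk() && (msg = pr.popMessage()) != 0) {
	int body = 0, joint = 0;
	float x = 0, y = 0, z = 0;
	if (msg->match("/bodyJoint").popInt32(body).popInt32(joint)
	       .popFloat(x).popFloat(y).popFloat(z).isOkNoMoreArgs()) {
		// joint coordinates are relative to joints[1] (SpineMid / "Torso")
	}
}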
Example #13
int main(int argc, char** argv)
{
   int first_time = 0;
   Size screen_size(1440, 900); // the destination image size (e.g., 100x100)
   Scalar text_color = Scalar(0, 255, 0);
   Scalar text_color2 = Scalar(0, 255, 255);
   Scalar text_color3 = Scalar(0, 0, 255);

   inhaler_coach coach;
   coach.control = 0;
   thread mThread(test_func, &coach);

   // 1a. Get Kinect Sensor
   cout << "Try to get default sensor" << endl;
   IKinectSensor* pSensor = nullptr;
   if (GetDefaultKinectSensor(&pSensor) != S_OK)
   {
      cerr << "Get Sensor failed" << endl;
      return -1;
   }

   // 1b. Open sensor
   cout << "Try to open sensor" << endl;
   if (pSensor->Open() != S_OK)
   {
      cerr << "Can't open sensor" << endl;
      return -1;
   }

   // 2. Color Related code
   IColorFrameReader* pColorFrameReader = nullptr;
   cv::Mat	mColorImg;
   UINT uBufferSize = 0;
   UINT uColorPointNum = 0;
   int iWidth = 0;
   int iHeight = 0;
   {
      // 2a. Get color frame source
      cout << "Try to get color source" << endl;
      IColorFrameSource* pFrameSource = nullptr;
      if (pSensor->get_ColorFrameSource(&pFrameSource) != S_OK)
      {
         cerr << "Can't get color frame source" << endl;
         return -1;
      }

      // 2b. Get frame description
      cout << "get color frame description" << endl;
      IFrameDescription* pFrameDescription = nullptr;
      if (pFrameSource->get_FrameDescription(&pFrameDescription) == S_OK)
      {
         pFrameDescription->get_Width(&iWidth);
         pFrameDescription->get_Height(&iHeight);
      }
      pFrameDescription->Release();
      pFrameDescription = nullptr;

      // 2c. get frame reader
      cout << "Try to get color frame reader" << endl;
      if (pFrameSource->OpenReader(&pColorFrameReader) != S_OK)
      {
         cerr << "Can't get color frame reader" << endl;
         return -1;
      }

      // 2d. release Frame source
      cout << "Release frame source" << endl;
      pFrameSource->Release();
      pFrameSource = nullptr;

      // Prepare OpenCV data
      mColorImg = cv::Mat(iHeight, iWidth, CV_8UC4);
      uBufferSize = iHeight * iWidth * 4 * sizeof(BYTE);
      uColorPointNum = iHeight * iWidth;
   }
   

   // 3. Depth related code
   IDepthFrameReader* pDepthFrameReader = nullptr;
   UINT uDepthPointNum = 0;
   int iDepthWidth = 0, iDepthHeight = 0;
   cout << "Try to get depth source" << endl;
   {
      // Get frame source
      IDepthFrameSource* pFrameSource = nullptr;
      if (pSensor->get_DepthFrameSource(&pFrameSource) != S_OK)
      {
         cerr << "Can't get depth frame source" << endl;
         return -1;
      }

      // Get frame description
      cout << "get depth frame description" << endl;
      IFrameDescription* pFrameDescription = nullptr;
      if (pFrameSource->get_FrameDescription(&pFrameDescription) == S_OK)
      {
         pFrameDescription->get_Width(&iDepthWidth);
         pFrameDescription->get_Height(&iDepthHeight);
         uDepthPointNum = iDepthWidth * iDepthHeight;
      }
      pFrameDescription->Release();
      pFrameDescription = nullptr;

      // get frame reader
      cout << "Try to get depth frame reader" << endl;
      if (pFrameSource->OpenReader(&pDepthFrameReader) != S_OK)
      {
         cerr << "Can't get depth frame reader" << endl;
         return -1;
      }

      // release Frame source
      cout << "Release frame source" << endl;
      pFrameSource->Release();
      pFrameSource = nullptr;
   }



   // 4. Body related code
   IBodyFrameReader* pBodyFrameReader = nullptr;
   IBody** aBodyData = nullptr;
   INT32 iBodyCount = 0;
   {
      // 3a. Get frame source
      cout << "Try to get body source" << endl;
      IBodyFrameSource* pFrameSource = nullptr;
      if (pSensor->get_BodyFrameSource(&pFrameSource) != S_OK)
      {
         cerr << "Can't get body frame source" << endl;
         return -1;
      }

      // 3b. Get the number of body
      if (pFrameSource->get_BodyCount(&iBodyCount) != S_OK)
      {
         cerr << "Can't get body count" << endl;
         return -1;
      }
      cout << " > Can trace " << iBodyCount << " bodies" << endl;
      aBodyData = new IBody*[iBodyCount];
      for (int i = 0; i < iBodyCount; ++i)
         aBodyData[i] = nullptr;

      // 3c. get frame reader
      cout << "Try to get body frame reader" << endl;
      if (pFrameSource->OpenReader(&pBodyFrameReader) != S_OK)
      {
         cerr << "Can't get body frame reader" << endl;
         return -1;
      }

      // 3d. release Frame source
      cout << "Release frame source" << endl;
      pFrameSource->Release();
      pFrameSource = nullptr;
   }
   


   // 4. Body Index related code
   IBodyIndexFrameReader* pBIFrameReader = nullptr;
   cout << "Try to get body index source" << endl;
   {
      // Get frame source
      IBodyIndexFrameSource* pFrameSource = nullptr;
      if (pSensor->get_BodyIndexFrameSource(&pFrameSource) != S_OK)
      {
         cerr << "Can't get body index frame source" << endl;
         return -1;
      }

      // get frame reader
      cout << "Try to get body index frame reader" << endl;
      if (pFrameSource->OpenReader(&pBIFrameReader) != S_OK)
      {
         cerr << "Can't get depth frame reader" << endl;
         return -1;
      }

      // release Frame source
      cout << "Release frame source" << endl;
      pFrameSource->Release();
      pFrameSource = nullptr;
   }



   // 5. background
   cv::Mat imgBG(iHeight, iWidth, CV_8UC3);
   imgBG.setTo(0);







   // 4. get CoordinateMapper
   ICoordinateMapper* pCoordinateMapper = nullptr;
   if (pSensor->get_CoordinateMapper(&pCoordinateMapper) != S_OK)
   {
      cout << "Can't get coordinate mapper" << endl;
      return -1;
   }

   // Enter main loop
   UINT16* pDepthPoints = new UINT16[uDepthPointNum];
   BYTE*	pBodyIndex = new BYTE[uDepthPointNum];
   DepthSpacePoint* pPointArray = new DepthSpacePoint[uColorPointNum];

   cv::namedWindow("Inhaler Coach");
   while (true)
   {
      // 4a. Get last frame
      IColorFrame* pColorFrame = nullptr;
      if (pColorFrameReader->AcquireLatestFrame(&pColorFrame) == S_OK)
      {
         pColorFrame->CopyConvertedFrameDataToArray(uBufferSize, mColorImg.data, ColorImageFormat_Bgra);
         pColorFrame->Release();
         pColorFrame = nullptr;
      }
      cv::Mat mImg = mColorImg.clone();
     
      // 8b. read depth frame
      IDepthFrame* pDepthFrame = nullptr;
      if (pDepthFrameReader->AcquireLatestFrame(&pDepthFrame) == S_OK)
      {
         pDepthFrame->CopyFrameDataToArray(uDepthPointNum, pDepthPoints);
         pDepthFrame->Release();
         pDepthFrame = nullptr;
      }


      // 8c. read body index frame
      IBodyIndexFrame* pBIFrame = nullptr;
      if (pBIFrameReader->AcquireLatestFrame(&pBIFrame) == S_OK)
      {
         pBIFrame->CopyFrameDataToArray(uDepthPointNum, pBodyIndex);
         pBIFrame->Release();
         pBIFrame = nullptr;
      }

#ifdef COACH_DEBUG
      cv::Mat imgTarget = imgBG.clone();
      // 9b. map color to depth
      if (pCoordinateMapper->MapColorFrameToDepthSpace(uDepthPointNum, pDepthPoints, uColorPointNum, pPointArray) == S_OK)
      {
         for (int y = 0; y < imgTarget.rows; ++y)
         {
            for (int x = 0; x < imgTarget.cols; ++x)
            {
               // ( x, y ) in color frame = rPoint in depth frame
               const DepthSpacePoint& rPoint = pPointArray[y * imgTarget.cols + x];

               // check if rPoint is in range
               if (rPoint.X >= 0 && rPoint.X < iDepthWidth && rPoint.Y >= 0 && rPoint.Y < iDepthHeight)
               {
                  // fill color from color frame if this pixel is user
                  int iIdx = (int)rPoint.X + iDepthWidth * (int)rPoint.Y;
                  if (pBodyIndex[iIdx] < 6)
                  {
                     cv::Vec4b& rPixel = mImg.at<cv::Vec4b>(y, x);
                     imgTarget.at<cv::Vec3b>(y, x) = cv::Vec3b(rPixel[0], rPixel[1], rPixel[2]);
                  }
               }
            }
         }
      }
#else
   cv::Mat imgTarget = mImg.clone();
#endif




      // 4b. Get body data
      IBodyFrame* pBodyFrame = nullptr;
      if (pBodyFrameReader->AcquireLatestFrame(&pBodyFrame) == S_OK)
      {

         // 4b. get Body data
         if (pBodyFrame->GetAndRefreshBodyData(iBodyCount, aBodyData) == S_OK)
         {
            // 4c. for each body
            for (int i = 0; i < iBodyCount; ++i)
            {
               IBody* pBody = aBodyData[i];

               // check if is tracked
               BOOLEAN bTracked = false;
               if ((pBody->get_IsTracked(&bTracked) == S_OK) && bTracked)
               {
                  // get joint position
                  Joint aJoints[JointType::JointType_Count];
                  if (pBody->GetJoints(JointType::JointType_Count, aJoints) == S_OK)
                  {
                     
                     if (coach.state == 0){
                        coach.state = 1;
                        if (first_time == 0){
                           first_time = 1;
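                           // note: with SND_FILENAME alone, PlaySound blocks until the clip
                           // finishes; adding SND_ASYNC would keep the capture loop running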
                           PlaySound(TEXT("welcome.wav"), NULL, SND_FILENAME);
                        }
                     }
#ifdef COACH_DEBUG               
                     DrawLine(imgTarget, aJoints[JointType_SpineBase], aJoints[JointType_SpineMid], pCoordinateMapper);
                     DrawLine(imgTarget, aJoints[JointType_SpineMid], aJoints[JointType_SpineShoulder], pCoordinateMapper);
                     DrawLine(imgTarget, aJoints[JointType_SpineShoulder], aJoints[JointType_Neck], pCoordinateMapper);
                     DrawLine(imgTarget, aJoints[JointType_Neck], aJoints[JointType_Head], pCoordinateMapper);

                     DrawLine(imgTarget, aJoints[JointType_SpineShoulder], aJoints[JointType_ShoulderLeft], pCoordinateMapper);
                     DrawLine(imgTarget, aJoints[JointType_ShoulderLeft], aJoints[JointType_ElbowLeft], pCoordinateMapper);
                     DrawLine(imgTarget, aJoints[JointType_ElbowLeft], aJoints[JointType_WristLeft], pCoordinateMapper);
                     DrawLine(imgTarget, aJoints[JointType_WristLeft], aJoints[JointType_HandLeft], pCoordinateMapper);
                     DrawLine(imgTarget, aJoints[JointType_HandLeft], aJoints[JointType_HandTipLeft], pCoordinateMapper);
                     //DrawLine(imgTarget, aJoints[JointType_HandLeft], aJoints[JointType_ThumbLeft], pCoordinateMapper);

                     DrawLine(imgTarget, aJoints[JointType_SpineShoulder], aJoints[JointType_ShoulderRight], pCoordinateMapper);
                     DrawLine(imgTarget, aJoints[JointType_ShoulderRight], aJoints[JointType_ElbowRight], pCoordinateMapper);
                     DrawLine(imgTarget, aJoints[JointType_ElbowRight], aJoints[JointType_WristRight], pCoordinateMapper);
                     DrawLine(imgTarget, aJoints[JointType_WristRight], aJoints[JointType_HandRight], pCoordinateMapper);
                     DrawLine(imgTarget, aJoints[JointType_HandRight], aJoints[JointType_HandTipRight], pCoordinateMapper);
                     //DrawLine(imgTarget, aJoints[JointType_HandRight], aJoints[JointType_ThumbRight], pCoordinateMapper);
#endif
                     ColorSpacePoint q;
                     ColorSpacePoint head;
                     //ColorSpacePoint w;

                     pCoordinateMapper->MapCameraPointToColorSpace(aJoints[JointType_Head].Position, &head);
                     // check shaking
                     coach.shaking_detection(aJoints, pCoordinateMapper);
                     q = coach.position_checking(aJoints, pCoordinateMapper);
#ifdef COACH_DEBUG
                     circle(imgTarget, cv::Point(q.X, q.Y), 10, Scalar(0, 255, 255), 10, 8, 0);
                     //circle(imgTarget, cv::Point(q.X, q.Y), 10, Scalar(0, 255, 255), 10, 8, 0);
                     rectangle(imgTarget, Point(head.X - 50, head.Y - 40), Point(head.X + 50, head.Y + 90), Scalar(0, 255, 255), 1, 8, 0);
                     //circle(imgTarget, cv::Point(w.X, w.Y), 10, Scalar(255, 0, 255), 10, 8, 0);
#endif
                     coach.state_change_rule();
                  }
               }               
            }
         }
         else
         {
            cerr << "Can't read body data" << endl;
         }

         // 4e. release frame
         pBodyFrame->Release();
      }


      switch (coach.state){
         case 0: putText(imgTarget, "CMU Inhaler Coaching System", Point(120, 120), FONT_HERSHEY_DUPLEX, 2, text_color); break;
         case 1: putText(imgTarget, "Please shake the inhaler", Point(20, 120), FONT_HERSHEY_DUPLEX, 2, text_color2); break;
         case 2: putText(imgTarget, "Shaking detected", Point(20, 120), FONT_HERSHEY_DUPLEX, 2, text_color2); break;
         case 3: putText(imgTarget, "Please put the inhaler in front of your mouth", Point(20, 120), FONT_HERSHEY_DUPLEX, 2, text_color2); break;
         case 4: putText(imgTarget, "Position check OK", Point(20, 120), FONT_HERSHEY_DUPLEX, 2, text_color2); break;
         case 5: putText(imgTarget, "You forgot to shake the inhaler first!!!", Point(20, 120), FONT_HERSHEY_DUPLEX, 2, text_color3); break;
      }

      // show image
      Mat dst;
      resize(imgTarget, dst, screen_size);
      imshow("Coach", dst);
      // 4c. check keyboard input
      if (cv::waitKey(30) == VK_ESCAPE){
         break;
      }
   }
   mThread.join();
   
   // 3. delete body data array
   delete[] aBodyData;

   // 3. release frame reader
   cout << "Release body frame reader" << endl;
   pBodyFrameReader->Release();
   pBodyFrameReader = nullptr;

   // 2. release color frame reader
   cout << "Release color frame reader" << endl;
   pColorFrameReader->Release();
   pColorFrameReader = nullptr;

   // 1c. Close Sensor
   cout << "close sensor" << endl;
   pSensor->Close();

   // 1d. Release Sensor
   cout << "Release sensor" << endl;
   pSensor->Release();
   pSensor = nullptr;

   return 0;
}
Exemple #14
0
void Device::update()
{
    if ( mFrameReader == 0 ) {
        return;
    }

    IAudioBeamFrame* audioFrame								= 0;
    IBodyFrame* bodyFrame									= 0;
    IBodyIndexFrame* bodyIndexFrame							= 0;
    IColorFrame* colorFrame									= 0;
    IDepthFrame* depthFrame									= 0;
    IMultiSourceFrame* frame								= 0;
    IInfraredFrame* infraredFrame							= 0;
    ILongExposureInfraredFrame* infraredLongExposureFrame	= 0;

    HRESULT hr = mFrameReader->AcquireLatestFrame( &frame );
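    // Each enabled stream is then acquired through its frame reference on this
    // single multi-source frame; once hr fails, the remaining steps are skipped.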

    if ( SUCCEEDED( hr ) && mDeviceOptions.isAudioEnabled() ) {
        // TODO audio
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isBodyEnabled() ) {
        IBodyFrameReference* frameRef = 0;
        hr = frame->get_BodyFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &bodyFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isBodyIndexEnabled() ) {
        IBodyIndexFrameReference* frameRef = 0;
        hr = frame->get_BodyIndexFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &bodyIndexFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isColorEnabled() ) {
        IColorFrameReference* frameRef = 0;
        hr = frame->get_ColorFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &colorFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isDepthEnabled() ) {
        IDepthFrameReference* frameRef = 0;
        hr = frame->get_DepthFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &depthFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isInfraredEnabled() ) {
        IInfraredFrameReference* frameRef = 0;
        hr = frame->get_InfraredFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &infraredFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isInfraredLongExposureEnabled() ) {
        ILongExposureInfraredFrameReference* frameRef = 0;
        hr = frame->get_LongExposureInfraredFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &infraredLongExposureFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) ) {
        long long timeStamp										= 0L;

        // TODO audio

        std::vector<Body> bodies;
        int64_t bodyTime										= 0L;
        IBody* kinectBodies[ BODY_COUNT ]						= { 0 };
        Vec4f floorClipPlane									= Vec4f::zero();

        Channel8u bodyIndexChannel;
        IFrameDescription* bodyIndexFrameDescription			= 0;
        int32_t bodyIndexWidth									= 0;
        int32_t bodyIndexHeight									= 0;
        uint32_t bodyIndexBufferSize							= 0;
        uint8_t* bodyIndexBuffer								= 0;
        int64_t bodyIndexTime									= 0L;

        Surface8u colorSurface;
        IFrameDescription* colorFrameDescription				= 0;
        int32_t colorWidth										= 0;
        int32_t colorHeight										= 0;
        ColorImageFormat colorImageFormat						= ColorImageFormat_None;
        uint32_t colorBufferSize								= 0;
        uint8_t* colorBuffer									= 0;

        Channel16u depthChannel;
        IFrameDescription* depthFrameDescription				= 0;
        int32_t depthWidth										= 0;
        int32_t depthHeight										= 0;
        uint16_t depthMinReliableDistance						= 0;
        uint16_t depthMaxReliableDistance						= 0;
        uint32_t depthBufferSize								= 0;
        uint16_t* depthBuffer									= 0;

        Channel16u infraredChannel;
        IFrameDescription* infraredFrameDescription				= 0;
        int32_t infraredWidth									= 0;
        int32_t infraredHeight									= 0;
        uint32_t infraredBufferSize								= 0;
        uint16_t* infraredBuffer								= 0;

        Channel16u infraredLongExposureChannel;
        IFrameDescription* infraredLongExposureFrameDescription	= 0;
        int32_t infraredLongExposureWidth						= 0;
        int32_t infraredLongExposureHeight						= 0;
        uint32_t infraredLongExposureBufferSize					= 0;
        uint16_t* infraredLongExposureBuffer					= 0;

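        // note: the timestamp is read from the depth frame, which assumes the
        // depth stream is enabled; depthFrame would be null here otherwise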
        hr = depthFrame->get_RelativeTime( &timeStamp );

        // TODO audio
        if ( mDeviceOptions.isAudioEnabled() ) {

        }

        if ( mDeviceOptions.isBodyEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = bodyFrame->get_RelativeTime( &bodyTime );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = bodyFrame->GetAndRefreshBodyData( BODY_COUNT, kinectBodies );
            }
            if ( SUCCEEDED( hr ) ) {
                Vector4 v;
                hr = bodyFrame->get_FloorClipPlane( &v );
                floorClipPlane = toVec4f( v );
            }
            if ( SUCCEEDED( hr ) ) {
                for ( uint8_t i = 0; i < BODY_COUNT; ++i ) {
                    IBody* kinectBody = kinectBodies[ i ];
                    if ( kinectBody != 0 ) {
                        uint8_t isTracked	= false;
                        hr					= kinectBody->get_IsTracked( &isTracked );
                        if ( SUCCEEDED( hr ) && isTracked ) {
                            Joint joints[ JointType_Count ];
                            kinectBody->GetJoints( JointType_Count, joints );

                            JointOrientation jointOrientations[ JointType_Count ];
                            kinectBody->GetJointOrientations( JointType_Count, jointOrientations );

                            uint64_t id = 0;
                            kinectBody->get_TrackingId( &id );

                            std::map<JointType, Body::Joint> jointMap;
                            for ( int32_t j = 0; j < JointType_Count; ++j ) {
                                Body::Joint joint(
                                    toVec3f( joints[ j ].Position ),
                                    toQuatf( jointOrientations[ j ].Orientation ),
                                    joints[ j ].TrackingState
                                );
                                jointMap.insert( pair<JointType, Body::Joint>( static_cast<JointType>( j ), joint ) );
                            }
                            Body body( id, i, jointMap );
                            bodies.push_back( body );
                        }
                    }
                }
            }
        }

        if ( mDeviceOptions.isBodyIndexEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = bodyIndexFrame->get_RelativeTime( &bodyIndexTime );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = bodyIndexFrame->get_FrameDescription( &bodyIndexFrameDescription );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = bodyIndexFrameDescription->get_Width( &bodyIndexWidth );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = bodyIndexFrameDescription->get_Height( &bodyIndexHeight );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = bodyIndexFrame->AccessUnderlyingBuffer( &bodyIndexBufferSize, &bodyIndexBuffer );
            }
            if ( SUCCEEDED( hr ) ) {
                bodyIndexChannel = Channel8u( bodyIndexWidth, bodyIndexHeight );
                memcpy( bodyIndexChannel.getData(), bodyIndexBuffer, bodyIndexWidth * bodyIndexHeight * sizeof( uint8_t ) );
            }
        }

        if ( mDeviceOptions.isColorEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = colorFrame->get_FrameDescription( &colorFrameDescription );
                if ( SUCCEEDED( hr ) ) {
                    float vFov = 0.0f;
                    float hFov = 0.0f;
                    float dFov = 0.0f;
                    colorFrameDescription->get_VerticalFieldOfView( &vFov );
                    colorFrameDescription->get_HorizontalFieldOfView( &hFov );
                    colorFrameDescription->get_DiagonalFieldOfView( &dFov );
                }
            }
            if ( SUCCEEDED( hr ) ) {
                hr = colorFrameDescription->get_Width( &colorWidth );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = colorFrameDescription->get_Height( &colorHeight );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = colorFrame->get_RawColorImageFormat( &colorImageFormat );
            }
            if ( SUCCEEDED( hr ) ) {
                colorBufferSize = colorWidth * colorHeight * sizeof( uint8_t ) * 4;
                colorBuffer		= new uint8_t[ colorBufferSize ];
                hr = colorFrame->CopyConvertedFrameDataToArray( colorBufferSize, reinterpret_cast<uint8_t*>( colorBuffer ), ColorImageFormat_Rgba );

                if ( SUCCEEDED( hr ) ) {
                    colorSurface = Surface8u( colorWidth, colorHeight, false, SurfaceChannelOrder::RGBA );
                    memcpy( colorSurface.getData(), colorBuffer, colorWidth * colorHeight * sizeof( uint8_t ) * 4 );
                }

                delete [] colorBuffer;
                colorBuffer = 0;
            }
        }

        if ( mDeviceOptions.isDepthEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrame->get_FrameDescription( &depthFrameDescription );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrameDescription->get_Width( &depthWidth );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrameDescription->get_Height( &depthHeight );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrame->get_DepthMinReliableDistance( &depthMinReliableDistance );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrame->get_DepthMaxReliableDistance( &depthMaxReliableDistance );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrame->AccessUnderlyingBuffer( &depthBufferSize, &depthBuffer );
            }
            if ( SUCCEEDED( hr ) ) {
                depthChannel = Channel16u( depthWidth, depthHeight );
                memcpy( depthChannel.getData(), depthBuffer, depthWidth * depthHeight * sizeof( uint16_t ) );
            }
        }

        if ( mDeviceOptions.isInfraredEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = infraredFrame->get_FrameDescription( &infraredFrameDescription );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredFrameDescription->get_Width( &infraredWidth );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredFrameDescription->get_Height( &infraredHeight );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredFrame->AccessUnderlyingBuffer( &infraredBufferSize, &infraredBuffer );
            }
            if ( SUCCEEDED( hr ) ) {
                infraredChannel = Channel16u( infraredWidth, infraredHeight );
                memcpy( infraredChannel.getData(), infraredBuffer,  infraredWidth * infraredHeight * sizeof( uint16_t ) );
            }
        }

        if ( mDeviceOptions.isInfraredLongExposureEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = infraredLongExposureFrame->get_FrameDescription( &infraredLongExposureFrameDescription );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredLongExposureFrameDescription->get_Width( &infraredLongExposureWidth );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredLongExposureFrameDescription->get_Height( &infraredLongExposureHeight );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredLongExposureFrame->AccessUnderlyingBuffer( &infraredLongExposureBufferSize, &infraredLongExposureBuffer );
            }
            if ( SUCCEEDED( hr ) ) {
                infraredLongExposureChannel = Channel16u( infraredLongExposureWidth, infraredLongExposureHeight );
                memcpy( infraredLongExposureChannel.getData(), infraredLongExposureBuffer, infraredLongExposureWidth * infraredLongExposureHeight * sizeof( uint16_t ) );
            }
        }

        if ( SUCCEEDED( hr ) ) {
            mFrame.mBodies						= bodies;
            mFrame.mChannelBodyIndex			= bodyIndexChannel;
            mFrame.mChannelDepth				= depthChannel;
            mFrame.mChannelInfrared				= infraredChannel;
            mFrame.mChannelInfraredLongExposure	= infraredLongExposureChannel;
            mFrame.mDeviceId					= mDeviceOptions.getDeviceId();
            mFrame.mSurfaceColor				= colorSurface;
            mFrame.mTimeStamp					= timeStamp;
            mFrame.mFloorClipPlane				= floorClipPlane;
        }

        if ( bodyIndexFrameDescription != 0 ) {
            bodyIndexFrameDescription->Release();
            bodyIndexFrameDescription = 0;
        }
        if ( colorFrameDescription != 0 ) {
            colorFrameDescription->Release();
            colorFrameDescription = 0;
        }
        if ( depthFrameDescription != 0 ) {
            depthFrameDescription->Release();
            depthFrameDescription = 0;
        }
        if ( infraredFrameDescription != 0 ) {
            infraredFrameDescription->Release();
            infraredFrameDescription = 0;
        }
        if ( infraredLongExposureFrameDescription != 0 ) {
            infraredLongExposureFrameDescription->Release();
            infraredLongExposureFrameDescription = 0;
        }
    }

    if ( audioFrame != 0 ) {
        audioFrame->Release();
        audioFrame = 0;
    }
    if ( bodyFrame != 0 ) {
        bodyFrame->Release();
        bodyFrame = 0;
    }
    if ( bodyIndexFrame != 0 ) {
        bodyIndexFrame->Release();
        bodyIndexFrame = 0;
    }
    if ( colorFrame != 0 ) {
        colorFrame->Release();
        colorFrame = 0;
    }
    if ( depthFrame != 0 ) {
        depthFrame->Release();
        depthFrame = 0;
    }
    if ( frame != 0 ) {
        frame->Release();
        frame = 0;
    }
    if ( infraredFrame != 0 ) {
        infraredFrame->Release();
        infraredFrame = 0;
    }
    if ( infraredLongExposureFrame != 0 ) {
        infraredLongExposureFrame->Release();
        infraredLongExposureFrame = 0;
    }
}
/// <summary>
/// Processes new face frames
/// </summary>
void CFaceBasics::ProcessFaces()
{
    HRESULT hr;
    IBody* ppBodies[BODY_COUNT] = {0};
    bool bHaveBodyData = SUCCEEDED( UpdateBodyData(ppBodies) );

	UINT32 vertexCount = 0;
	hr = GetFaceModelVertexCount(&vertexCount);

	UINT colorSpaceCount = vertexCount;
	ColorSpacePoint * pFaceColors = new ColorSpacePoint[colorSpaceCount];

    // iterate through each face reader
    for (int iFace = 0; iFace < BODY_COUNT; ++iFace)
    {
        // retrieve the latest face frame from this reader
        IFaceFrame* pFaceFrame = nullptr;
        hr = m_pFaceFrameReaders[iFace]->AcquireLatestFrame(&pFaceFrame);
		
        BOOLEAN bFaceTracked = false;
        if (SUCCEEDED(hr) && nullptr != pFaceFrame)
        {
            // check if a valid face is tracked in this face frame
            hr = pFaceFrame->get_IsTrackingIdValid(&bFaceTracked);
        }
		
        if (SUCCEEDED(hr))
        {
            if (bFaceTracked)
            {
                IFaceFrameResult* pFaceFrameResult = nullptr;
                RectI faceBox = {0};
                PointF facePoints[FacePointType::FacePointType_Count];
                Vector4 faceRotation;
                DetectionResult faceProperties[FaceProperty::FaceProperty_Count];
                D2D1_POINT_2F faceTextLayout;

                hr = pFaceFrame->get_FaceFrameResult(&pFaceFrameResult);

                // need to verify if pFaceFrameResult contains data before trying to access it
                if (SUCCEEDED(hr) && pFaceFrameResult != nullptr)
                {
                    hr = pFaceFrameResult->get_FaceBoundingBoxInColorSpace(&faceBox);

                    if (SUCCEEDED(hr))
                    {										
                        hr = pFaceFrameResult->GetFacePointsInColorSpace(FacePointType::FacePointType_Count, facePoints);
                    }

                    if (SUCCEEDED(hr))
                    {
                        hr = pFaceFrameResult->get_FaceRotationQuaternion(&faceRotation);
                    }

                    if (SUCCEEDED(hr))
                    {
                        hr = pFaceFrameResult->GetFaceProperties(FaceProperty::FaceProperty_Count, faceProperties);
                    }

                    if (SUCCEEDED(hr))
                    {
                        hr = GetFaceTextPositionInColorSpace(ppBodies[iFace], &faceTextLayout);
                    }

                    if (SUCCEEDED(hr))
                    {
						//let's see if we can get hd face frame here
						// retrieve the latest face frame from this reader
						IHighDefinitionFaceFrame* phdFaceFrame = nullptr;
						hr = m_phdFaceFrameReaders[iFace]->AcquireLatestFrame(&phdFaceFrame);
						if (SUCCEEDED(hr) && nullptr != phdFaceFrame)
						{
							//we have a hd face frame so get the vertices							
							hr = phdFaceFrame->GetAndRefreshFaceAlignmentResult(m_phdFaceAlignments[iFace]);

							IFaceModel * pFaceModel = nullptr;
							if (SUCCEEDED(hr))
							{
								//we have updated the faceAlignment results
								hr = phdFaceFrame->get_FaceModel(&pFaceModel);
								if (SUCCEEDED(hr) && nullptr != pFaceModel)
								{	
									 CameraSpacePoint * pFacePoints = new CameraSpacePoint[vertexCount];
									hr = pFaceModel->CalculateVerticesForAlignment(m_phdFaceAlignments[iFace], vertexCount, pFacePoints);

									const CameraSpacePoint * pConstFacePoints = pFacePoints;
									// now convert the camera-space points to color-space points
									hr = m_pCoordinateMapper->MapCameraPointsToColorSpace(vertexCount, pConstFacePoints, colorSpaceCount, pFaceColors);
									
									if (FAILED(hr))
									{
										// mapping failed - skip drawing HD face points this frame
										delete [] pFaceColors;
										pFaceColors = nullptr;
									}
									delete [] pFacePoints;
								}
								SafeRelease(pFaceModel);
							}

							SafeRelease(phdFaceFrame);
						}
						
						

						if (nullptr != pFaceColors)
						{
							
							m_pDrawDataStreams->DrawFaceFrameResults(iFace, &faceBox, facePoints, &faceRotation, faceProperties, &faceTextLayout, pFaceColors);
						}
						else
						{
							// draw face frame results
							m_pDrawDataStreams->DrawFaceFrameResults(iFace, &faceBox, facePoints, &faceRotation, faceProperties, &faceTextLayout);
						}
                    }							
                }

                SafeRelease(pFaceFrameResult);				

            }
            else 
            {	
                // face tracking is not valid - attempt to fix the issue
                // a valid body is required to perform this step
                if (bHaveBodyData)
                {
                    // check if the corresponding body is tracked 
                    // if this is true then update the face frame source to track this body
                    IBody* pBody = ppBodies[iFace];
                    if (pBody != nullptr)
                    {
                        BOOLEAN bTracked = false;
                        hr = pBody->get_IsTracked(&bTracked);

                        UINT64 bodyTId;
                        if (SUCCEEDED(hr) && bTracked)
                        {
                            // get the tracking ID of this body
                            hr = pBody->get_TrackingId(&bodyTId);
                            if (SUCCEEDED(hr))
                            {
                                // update the face frame source with the tracking ID
                                m_pFaceFrameSources[iFace]->put_TrackingId(bodyTId);
								m_phdFaceFrameSources[iFace]->put_TrackingId(bodyTId);
                            }
                        }
                    }
                }
            }
        }			

        SafeRelease(pFaceFrame);
    }

	delete [] pFaceColors;
    if (bHaveBodyData)
    {
        for (int i = 0; i < _countof(ppBodies); ++i)
        {
            SafeRelease(ppBodies[i]);
        }
    }
}
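
// Note: SafeRelease() is used throughout these examples but is never defined in
// any of the snippets. A minimal sketch, matching the helper the Kinect for
// Windows SDK samples conventionally define:
template<class Interface>
inline void SafeRelease(Interface*& pInterfaceToRelease)
{
    // release the COM interface and null the pointer so repeated calls are no-ops
    if (pInterfaceToRelease != nullptr)
    {
        pInterfaceToRelease->Release();
        pInterfaceToRelease = nullptr;
    }
}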
Exemple #16
0
//----------
void Body::update() {
    CHECK_OPEN

    IBodyFrame * frame = NULL;
    IFrameDescription * frameDescription = NULL;
    try {
        //acquire frame
        if (FAILED(this->reader->AcquireLatestFrame(&frame))) {
            return; // we often end up here when no new frame is available
        }
        INT64 nTime = 0;
        if (FAILED(frame->get_RelativeTime(&nTime))) {
            throw Exception("Failed to get relative time");
        }

        if (FAILED(frame->get_FloorClipPlane(&floorClipPlane))) {
            throw(Exception("Failed to get floor clip plane"));
        }

        IBody* ppBodies[BODY_COUNT] = {0};
        if (FAILED(frame->GetAndRefreshBodyData(_countof(ppBodies), ppBodies))) {
            throw Exception("Failed to refresh body data");
        }

        for (int i = 0; i < BODY_COUNT; ++i) {
            auto & body = bodies[i];
            body.clear();

            IBody* pBody = ppBodies[i];
            if (pBody)
            {
                BOOLEAN bTracked = false;
                if (FAILED(pBody->get_IsTracked(&bTracked))) {
                    throw Exception("Failed to get tracking status");
                }
                body.tracked = bTracked;
                body.bodyId = i;

                if (bTracked)
                {
                    // retrieve tracking id

                    UINT64 trackingId = -1;

                    if (FAILED(pBody->get_TrackingId(&trackingId))) {
                        throw Exception("Failed to get tracking id");
                    }

                    body.trackingId = trackingId;

                    // retrieve joint position & orientation

                    _Joint joints[JointType_Count];
                    _JointOrientation jointsOrient[JointType_Count];

                    if (FAILED(pBody->GetJoints(JointType_Count, joints))) {
                        throw Exception("Failed to get joints");
                    }
                    if (FAILED(pBody->GetJointOrientations(JointType_Count, jointsOrient))) {
                        throw Exception("Failed to get joints orientation");
                    }

                    for (int j = 0; j < JointType_Count; ++j) {
                        body.joints[joints[j].JointType] = Data::Joint(joints[j], jointsOrient[j]);
                    }

                    // retrieve hand states

                    HandState leftHandState = HandState_Unknown;
                    HandState rightHandState = HandState_Unknown;

                    if (FAILED(pBody->get_HandLeftState(&leftHandState))) {
                        throw Exception("Failed to get left hand state");
                    }
                    if (FAILED(pBody->get_HandRightState(&rightHandState))) {
                        throw Exception("Failed to get right hand state");
                    }

                    body.leftHandState = leftHandState;
                    body.rightHandState = rightHandState;
                }
            }
        }

        for (int i = 0; i < _countof(ppBodies); ++i)
        {
            SafeRelease(ppBodies[i]);
        }
    }
    catch (std::exception & e) {
        OFXKINECTFORWINDOWS2_ERROR << e.what();
    }
    SafeRelease(frameDescription);
    SafeRelease(frame);
}
Exemple #17
0
/// <summary>
/// Handle new body data
/// <param name="nTime">timestamp of frame</param>
/// <param name="nBodyCount">body data count</param>
/// <param name="ppBodies">body data in frame</param>
/// </summary>
void testApp::ProcessBody(INT64 nTime, int nBodyCount, IBody** ppBodies)
{

	HRESULT hr;
	int trackedBodies = 0;
	if (m_pCoordinateMapper)
	{
		//IBody* pBodyToTrack = NULL;
		//IBody* pBody2ToTrack = NULL;
		//UINT64 trackingId;
		//for (int i = 0; i < nBodyCount; ++i)
		//{
		//	IBody* pBody = ppBodies[i];
		//	if (lastBodyTrackingId == NULL || lastBody2TrackingId == NULL)
		//	{
		//		//Init a new body tracking
		//		if (pBody) {
		//			BOOLEAN bTracked = false;
		//			hr = pBody->get_IsTracked(&bTracked);

		//			if (SUCCEEDED(hr) && bTracked) {
		//				ofLogNotice("Body is tracked");
		//				if(lastBodyTrackingId == NULL)
		//					hr = pBody->get_TrackingId(&lastBodyTrackingId);
		//				else
		//					hr = pBody->get_TrackingId(&lastBody2TrackingId);

		//				if (SUCCEEDED(hr)) {
		//					ofLogNotice("Found body to track");
		//					pBodyToTrack = pBody;
		//				}
		//				break;
		//			}
		//		}
		//	}
		//	else {
		//		//Some body is already tracked
		//		if (pBody) {
		//			BOOLEAN bTracked = false;
		//			hr = pBody->get_IsTracked(&bTracked);

		//			if (SUCCEEDED(hr) && bTracked) {
		//				pBody->get_TrackingId(&trackingId);
		//				if (trackingId == lastBodyTrackingId) {
		//					pBodyToTrack = pBody;
		//				}
		//			}
		//		}
		//	}
		//}

		//if (pBodyToTrack == NULL && lastBodyTrackingId != NULL) {
		//	ofLogNotice("Lost body. Allowing new body to step in.");
		//	lastBodyTrackingId = NULL; //Allow new body to step in
		//}
		for (int i = 0; i < nBodyCount; ++i) {
			IBody* pBodyToTrack = ppBodies[i];
			if (pBodyToTrack)
			{
				BOOLEAN bTracked = false;
				hr = pBodyToTrack->get_IsTracked(&bTracked);

				if (SUCCEEDED(hr) && bTracked)
				{
					Joint joints[JointType_Count];
					ofVec2f jointPoints[JointType_Count];
					leftHandStates[i] = HandState_Unknown;
					rightHandStates[i] = HandState_Unknown;

					pBodyToTrack->get_HandLeftState(&leftHandStates[i]);
					pBodyToTrack->get_HandRightState(&rightHandStates[i]);

					hr = pBodyToTrack->GetJoints(_countof(joints), joints);
					if (SUCCEEDED(hr))
					{
						for (int j = 0; j < _countof(joints); ++j)
						{
							jointPoints[j] = BodyToScreen(joints[j].Position, 1024, 768);
						}

						lastChestPositions[trackedBodies] = jointPoints[JointType_Neck];
						lastHandPositionLeft[trackedBodies] = jointPoints[JointType_HandLeft];
						lastHandPositionRight[trackedBodies] = jointPoints[JointType_HandRight];
						pBodyToTrack->get_TrackingId(&lastBodyTrackingIds[trackedBodies]);
						trackedBodies++;
						
						//DrawBody(joints, jointPoints);
					}
				}
			}
		}
		

		//hr = m_pRenderTarget->EndDraw();

		// Device lost, need to recreate the render target
		// We'll dispose it now and retry drawing
		if (D2DERR_RECREATE_TARGET == hr)
		{
			hr = S_OK;
			//DiscardDirect2DResources();
		}
	}

	if (!m_nStartTime)
	{
		m_nStartTime = nTime;
	}

	double fps = 0.0;

	LARGE_INTEGER qpcNow = { 0 };
	if (m_fFreq)
	{
		if (QueryPerformanceCounter(&qpcNow))
		{
			if (m_nLastCounter)
			{
				m_nFramesSinceUpdate++;
				fps = m_fFreq * m_nFramesSinceUpdate / double(qpcNow.QuadPart - m_nLastCounter);
			}
		}
	}


}
void KinectV2Module::processBody(int nBodyCount, IBody ** ppBodies)
{
	IBody* pBody = nullptr;

	if (curBodyIndex != -1)
	{
		IBody* b = ppBodies[curBodyIndex];
		if (b)
		{
			BOOLEAN t;
			HRESULT hr = b->get_IsTracked(&t);
			if (SUCCEEDED(hr) && t) pBody = b;
		}
		
	}

	if (pBody == nullptr)
	{
		for (int i = 0; i < nBodyCount; i++)
		{
			IBody* b = ppBodies[i];
			if (!b) continue;
			BOOLEAN t;
			HRESULT hr = b->get_IsTracked(&t);
			if (SUCCEEDED(hr) && t)
			{
				pBody = b;
				curBodyIndex = i;
				break;
			}
		}
	}
	

	if (pBody == nullptr)
	{
		curBodyIndex = -1;
		return;
	}

	Joint joints[JointType_Count];
	HandState leftHandState = HandState_Unknown;
	HandState rightHandState = HandState_Unknown;

	pBody->get_HandLeftState(&leftHandState);
	pBody->get_HandRightState(&rightHandState);

	pBody->GetJoints(_countof(joints), joints);
	
	Vector3D<float> leftHandPos = Vector3D<float>(joints[JointType_HandLeft].Position.X, joints[JointType_HandLeft].Position.Y, joints[JointType_HandLeft].Position.Z);
	Vector3D<float> rightHandPos = Vector3D<float>(joints[JointType_HandRight].Position.X, joints[JointType_HandRight].Position.Y, joints[JointType_HandRight].Position.Z);
	Point<float> left2D = Point<float>(leftHandPos.x, leftHandPos.y);
	Point<float> right2D = Point<float>(rightHandPos.x, rightHandPos.y);

	leftHandX->setValue(leftHandPos.x);
	leftHandY->setValue(leftHandPos.y);
	rightHandX->setValue(rightHandPos.x);
	rightHandY->setValue(rightHandPos.y);
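	// note: lengthSquared() stores the squared distance - cheaper than a sqrt and
	// fine for relative comparisons, but not a metric distance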
	handsDistance->setValue((rightHandPos - leftHandPos).lengthSquared());
	handsAngle->setValue(radiansToDegrees(left2D.getAngleToPoint(right2D)) + 180);
	leftHandOpen->setValue(leftHandState == HandState_Open);
	rightHandOpen->setValue(rightHandState == HandState_Open);
}
Exemple #19
0
void Kin2::getHDFaces(bool withVertices, std::vector<k2::HDFaceData>& facesData)
{
    if (!(m_flags & k2::HD_FACE))
	{
        mexPrintf("ERROR: NO HD-FACE FUNCTIONALITY SELECTED!\n");
        return;
    }
        
	HRESULT hr;
	facesData.clear();

	// iterate through each HD face reader
	for (int iFace = 0; iFace < BODY_COUNT; ++iFace)
	{
		// retrieve the latest face frame from this reader
		IHighDefinitionFaceFrame *pHDFaceFrame = nullptr;
		
		hr = m_pHDFaceFrameReaders[iFace]->AcquireLatestFrame(&pHDFaceFrame);

		BOOLEAN bFaceTracked = false;
		if (SUCCEEDED(hr) && nullptr != pHDFaceFrame)
		{
			// check if a valid face is tracked in this face frame
			hr = pHDFaceFrame->get_IsTrackingIdValid(&bFaceTracked);
		}

		// If face tracked, save its data on the facesData structure array
		if (bFaceTracked)
		{		
            float animationUnits[FaceShapeAnimations_Count]={0};					
			UINT32 vertexCount;
            
            // Here we save the HD face data
			k2::HDFaceData faceData;
            
			hr = pHDFaceFrame->GetAndRefreshFaceAlignmentResult(m_pFaceAlignment[iFace]);

            if (SUCCEEDED(hr) && m_pFaceAlignment[iFace] != nullptr)
			{	
                // Get the Animation units
                hr = m_pFaceAlignment[iFace]->GetAnimationUnits(FaceShapeAnimations_Count, animationUnits);

                if (SUCCEEDED(hr))
                {
                    for (int vi = 0; vi < FaceShapeAnimations_Count; vi++)
                        faceData.animationUnits[vi] = animationUnits[vi];
                }

                // If HD face model vertices are requested
				if (withVertices)
				{
                    hr = GetFaceModelVertexCount(&vertexCount);
                    //mexPrintf("Number of Vertices: %d", vertexCount);

					// If there is no model ready, issue a warning message (just once)
					if (!m_faceModelReady[iFace] && !m_faceModelWarning[iFace])
					{
						mexPrintf("WARNING: No personal model has been created. An average face model will be used\n");
						m_faceModelWarning[iFace] = true;
					}
                    
                    CameraSpacePoint *vertices = new CameraSpacePoint[vertexCount];

					// Get the vertices (HD points)
					if (SUCCEEDED(hr))
						hr = m_pFaceModel[iFace]->CalculateVerticesForAlignment(m_pFaceAlignment[iFace], vertexCount, vertices);

					if (SUCCEEDED(hr))
                    {
						faceData.faceModel.resize(vertexCount);

						for (UINT32 vi = 0; vi < vertexCount; vi++)
							faceData.faceModel[vi] = vertices[vi];
					}

					if (vertices)
					{
						delete[] vertices;
						vertices = NULL;
					}
                } // if withVertices	
				
                // Get the facebox
                if (SUCCEEDED(hr))
                    hr = m_pFaceAlignment[iFace]->get_FaceBoundingBox(&faceData.faceBox);

                // Get the face rotation
                if (SUCCEEDED(hr))
                    hr = m_pFaceAlignment[iFace]->get_FaceOrientation(&faceData.faceRotation);

                // Get the head pivot
                if (SUCCEEDED(hr))
                {
                    hr = m_pFaceAlignment[iFace]->get_HeadPivotPoint(&faceData.headPivot);
                }

                // Save the HD face data in the member variable m_HDfacesData
                facesData.push_back(faceData);			
            }  // if face alignment	
        } // If face tracked
		else
		{
			// face tracking is not valid - attempt to fix the issue
			// a valid body is required to perform this step
			if (m_bHaveBodyData)
			{
				// check if the corresponding body is tracked 
				// if this is true then update the face frame source to track this body
				IBody* pBody = m_ppBodies[iFace];
				if (pBody != nullptr)
				{
					BOOLEAN bTracked = false;
					hr = pBody->get_IsTracked(&bTracked);

					UINT64 bodyTId;
					if (SUCCEEDED(hr) && bTracked)
					{
						// get the tracking ID of this body
						hr = pBody->get_TrackingId(&bodyTId);
						if (SUCCEEDED(hr))
						{
							// update the face frame source with the tracking ID
							m_pHDFaceFrameSources[iFace]->put_TrackingId(bodyTId);
						}
					}
				} // if (pBody != nullptr)
			} // if (m_bHaveBodyData)
		} // if face tracked

		SafeRelease(pHDFaceFrame);		
	} // for each face reader
} // end getHDFaces function
void Kinect2Manager::ProcessBody(unsigned int nTime, unsigned int nBodyCount, IBody * ppBodies[6]) {
#ifdef _USE_KINECT
    UCHAR bestBody = 0xff;
    float bestScore = 0;

    float trackingStateTable[3];
    trackingStateTable[TrackingState_Inferred] = 0.5;
    trackingStateTable[TrackingState_NotTracked] = 0;
    trackingStateTable[TrackingState_Tracked] = 1;

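    // Score each body by summing per-joint weights (tracked = 1, inferred = 0.5,
    // not tracked = 0) and keep the body with the highest total.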
    for (int i = 0; i<nBodyCount; ++i) {
        IBody * body = ppBodies[i];

        BOOLEAN bodyTracked;
        HRESULT hr = body->get_IsTracked(&bodyTracked);

        if (!SUCCEEDED(hr) || !bodyTracked) continue;

        Joint joints[JointType_Count];
        hr = body->GetJoints(JointType_Count, joints);

        if (!SUCCEEDED(hr)) continue;

        float score = 0;
        for (int j = 0; j<JointType_Count; ++j) {
            score += trackingStateTable[joints[j].TrackingState];
        }

        if (score > bestScore) {
            bestScore = score;
            bestBody = i;
        }
    }

    if (bestBody == 0xff) {
        m_bSkeletonIsGood = false;
        m_nBodyIndex = 0xff;
        return;
    }
    HRESULT hr = ppBodies[bestBody]->GetJoints(JointType_Count, m_pJoints);

    if (!SUCCEEDED(hr)) {
        std::cerr << "Error saving joints\n";
        m_bSkeletonIsGood = false;
        m_nBodyIndex = 0xff;
        return;
    }

    hr = ppBodies[bestBody]->GetJointOrientations(JointType_Count, m_pJointOrientations);

    if (!SUCCEEDED(hr)) {
        std::cerr << "Error saving joint orientations\n";
        m_bSkeletonIsGood = false;
        m_nBodyIndex = 0xff;
        return;
    }

    m_bSkeletonIsGood = true;
    m_nBodyIndex = bestBody;

    TrackingConfidence handLeftConfidence;
    hr = ppBodies[bestBody]->get_HandLeftConfidence(&handLeftConfidence);
    if (!SUCCEEDED(hr)) {
        handLeftConfidence = TrackingConfidence::TrackingConfidence_Low;
    }
    m_nHandLeftConfidence = handLeftConfidence;


    TrackingConfidence handRightConfidence;
    hr = ppBodies[bestBody]->get_HandRightConfidence(&handRightConfidence);
    if (!SUCCEEDED(hr)) {
        handRightConfidence = TrackingConfidence::TrackingConfidence_Low;
    }
    m_nHandRightConfidence = handRightConfidence;
#else
#endif
}
Exemple #21
0
void Kin2::buildHDFaceModels(int &collectionStatus, int &captureStatus)
{
    collectionStatus = -1;
    captureStatus = -1;
    
	if (!(m_flags & k2::HD_FACE))
	{
        mexPrintf("ERROR: NO HD-FACE FUNCTIONALITY SELECTED!\n");
        return;
    }
    
	HRESULT hr;

	// iterate through each HD face reader
	for (int iFace = 0; iFace < BODY_COUNT; ++iFace)
	{
		// retrieve the latest face frame from this reader
		IHighDefinitionFaceFrame *pHDFaceFrame = nullptr;

		hr = m_pHDFaceFrameReaders[iFace]->AcquireLatestFrame(&pHDFaceFrame);

		BOOLEAN bFaceTracked = false;
		if (SUCCEEDED(hr) && nullptr != pHDFaceFrame)
		{
			// check if a valid face is tracked in this face frame
			hr = pHDFaceFrame->get_IsTrackingIdValid(&bFaceTracked);
		}

		// If face tracked, try to align it
		if (SUCCEEDED(hr) && bFaceTracked)
		{
			IFaceModel *pFaceModel = nullptr;

			hr = pHDFaceFrame->GetAndRefreshFaceAlignmentResult(m_pFaceAlignment[iFace]);

			// If face aligned, continue building the model
			if (SUCCEEDED(hr) && m_pFaceAlignment[iFace] != nullptr)
			{
				// If face model not ready
				if (!m_faceModelReady[iFace])
				{
					FaceModelBuilderCollectionStatus collection;
					hr = m_pFaceModelBuilder[iFace]->get_CollectionStatus(&collection);
                    collectionStatus = (int)collection;

					// If model completed
					if (collection == FaceModelBuilderCollectionStatus::FaceModelBuilderCollectionStatus_Complete)
					{
						mexPrintf("Face Model Completed!\n");						

						IFaceModelData* pFaceModelData = nullptr;
						hr = m_pFaceModelBuilder[iFace]->GetFaceData(&pFaceModelData);

						// Produce the model
						if (SUCCEEDED(hr) && pFaceModelData != nullptr)
						{
                            mexPrintf("Producing model...\n");
							hr = pFaceModelData->ProduceFaceModel(&m_pFaceModel[iFace]);
                            mexPrintf("Model Ready!\n");

							// Set the model ready flag
							if (SUCCEEDED(hr) && m_pFaceModel[iFace] != nullptr)
							{
								m_faceModelReady[iFace] = true;
							}
						}
						SafeRelease(pFaceModelData);

						// Get the shape units (SU) i.e. the deformations wrt the base face model
                        /*
						if (SUCCEEDED(hr))
						{
							float deformations[FaceShapeDeformations_Count];
							hr = m_pFaceModel[iFace]->GetFaceShapeDeformations(FaceShapeDeformations_Count, deformations);										
						}
                        */
					}
					// if model not completed yet
					else
					{
						// Display Collection Status
                        /*
						if (collection >= FaceModelBuilderCollectionStatus::FaceModelBuilderCollectionStatus_TiltedUpViewsNeeded)
						{
							mexPrintf("Need : Tilted Up Views\n");							
						}


						else if (collection >= FaceModelBuilderCollectionStatus::FaceModelBuilderCollectionStatus_RightViewsNeeded)
						{
							mexPrintf("Need : Right Views\n");							
						}

						else if (collection >= FaceModelBuilderCollectionStatus::FaceModelBuilderCollectionStatus_LeftViewsNeeded)
						{
							mexPrintf("Need : Left Views\n");							
						}

						else if (collection >= FaceModelBuilderCollectionStatus::FaceModelBuilderCollectionStatus_FrontViewFramesNeeded)
						{
							mexPrintf("Need : Front ViewFrames\n");							
						}
                        */ 

						// Display Capture Status
						FaceModelBuilderCaptureStatus capture;
						hr = m_pFaceModelBuilder[iFace]->get_CaptureStatus(&capture);

                        captureStatus = (int)capture;
                        
                        /*
						switch (capture)
						{
						case FaceModelBuilderCaptureStatus::FaceModelBuilderCaptureStatus_OtherViewsNeeded:
							std::cout << "Other views needed" << std::endl;
							break;
						case FaceModelBuilderCaptureStatus::FaceModelBuilderCaptureStatus_FaceTooFar:
							std::cout << "Face Too Far from Camera" << std::endl;
							break;
						case FaceModelBuilderCaptureStatus::FaceModelBuilderCaptureStatus_FaceTooNear:
							std::cout << "Face Too Near to Camera" << std::endl;
							break;
						case FaceModelBuilderCaptureStatus_MovingTooFast:
							std::cout << "Moving Too Fast" << std::endl;
							break;
						case FaceModelBuilderCaptureStatus::FaceModelBuilderCaptureStatus_LostFaceTrack:
							std::cout << "Lost Face Track" << std::endl;
							break;
						case FaceModelBuilderCaptureStatus::FaceModelBuilderCaptureStatus_SystemError:
							std::cout << "ERROR: System Error" << std::endl;
							break;

						default:
							break;
						}
                         */
					} // collection not complete
				} // If face model not ready
			} // If face aligned
		} // If face tracked
		else
		{
			// face tracking is not valid - attempt to fix the issue
			// a valid body is required to perform this step
			if (m_bHaveBodyData)
			{
				// check if the corresponding body is tracked 
				// if this is true then update the face frame source to track this body
				IBody* pBody = m_ppBodies[iFace];
				if (pBody != nullptr)
				{
					BOOLEAN bTracked = false;
					hr = pBody->get_IsTracked(&bTracked);

					UINT64 bodyTId;
					if (SUCCEEDED(hr) && bTracked)
					{
						// get the tracking ID of this body
						hr = pBody->get_TrackingId(&bodyTId);
						if (SUCCEEDED(hr))
						{
							// update the face frame source with the tracking ID
							m_pHDFaceFrameSources[iFace]->put_TrackingId(bodyTId);
						}
					}
				} // if (pBody != nullptr)
			} // if (m_bHaveBodyData)
		} // if face tracked
	}// for each face reader

} // end buildHDFaceModels
Exemple #22
0
int main()
{
	printf("Hello, Wellcome to kinect world!\n");
	IKinectSensor* bb; //申请一个Sensor指针
	HRESULT hr = GetDefaultKinectSensor(&bb); // 获取一个默认的Sensor
	if ( FAILED(hr) )
	{
		printf("No Kinect connect to your pc!\n");
		goto endstop;
	}
	BOOLEAN bIsOpen = 0;
	bb->get_IsOpen(&bIsOpen); // check whether it is already open
	printf("bIsOpen: %d\n", bIsOpen);

	if ( !bIsOpen ) // not open yet, so try to open it
	{
		hr = bb->Open();
		if ( FAILED(hr) )
		{
			printf("Kinect Open Failed!\n");
			goto endstop;
		}
		printf("Kinect opened! But it need sometime to work!\n");
		// 这里一定要多等会,否则下面的判断都是错误的
		printf("Wait For 3000 ms...\n");
		Sleep(3000);
	}
	bIsOpen = 0;
	bb->get_IsOpen(&bIsOpen); // is it open now?
	printf("bIsOpen: %d\n", bIsOpen);
	BOOLEAN bAvaliable = 0;
	bb->get_IsAvailable(&bAvaliable); // is it available?
	printf("bAvaliable: %d\n", bAvaliable);

	DWORD dwCapability = 0; 
	bb->get_KinectCapabilities(&dwCapability); // get the capability flags
	printf("dwCapability: %d\n", dwCapability);
	WCHAR bbuid[256] = { 0 };
	bb->get_UniqueKinectId(256, bbuid); // get the unique ID
	printf("UID: %ls\n", bbuid); // %ls because bbuid is a wide string

	// audio data acquisition is omitted here
	// get body data
	IBodyFrameSource* bodys = nullptr;
	bb->get_BodyFrameSource(&bodys); // body data source
	INT32 nBodyNum = 0;
	bodys->get_BodyCount(&nBodyNum); // get the body count - not very useful, it is always 6
	printf("Body Num: %d\n", nBodyNum);
	IBodyFrameReader* bodyr = nullptr;
	bodys->OpenReader(&bodyr); // open a reader for the body data

	while (true)
	{
		IBodyFrame* bodyf = nullptr;
		bodyr->AcquireLatestFrame(&bodyf); // acquire the most recent frame
		if ( !bodyf )
		{
			Sleep(100);
			printf(".");
			continue;
		}
		IBody* ppBodies[BODY_COUNT] = { 0 };
		bodyf->GetAndRefreshBodyData(BODY_COUNT, ppBodies); // refresh body data for all six slots
		for (int i = 0; i < BODY_COUNT; ++i)
		{
			IBody* pBody = ppBodies[i]; // poll each person's data
			if (pBody)
			{
				BOOLEAN bTracked = false;
				hr = pBody->get_IsTracked(&bTracked); // check whether this body is tracked, i.e. whether a person is present
				if (bTracked)
				{
					Joint joints[JointType_Count];
					HandState leftHandState = HandState_Unknown;
					HandState rightHandState = HandState_Unknown;
					pBody->get_HandLeftState(&leftHandState); // get the left hand state
					pBody->get_HandRightState(&rightHandState); // get the right hand state

					hr = pBody->GetJoints(_countof(joints), joints); // get the skeleton joints (25 points)
					printf("Person %d : Joints[0].X  %.2f\n", i, joints[0].Position.X); // print a simple sample of the data

				}
			}
		}
		for (int i = 0; i < BODY_COUNT; ++i)
		{
			if (ppBodies[i]) ppBodies[i]->Release(); // guard against null slots if the refresh failed
		}
		bodyf->Release();
	}


endclose:
	bb->Close();
endstop:
	system("pause");
	return 0;
}
Exemple #23
0
/// <summary>
/// Handle new body data
/// <param name="nTime">timestamp of frame</param>
/// <param name="nBodyCount">body data count</param>
/// <param name="ppBodies">body data in frame</param>
/// </summary>
void CBodyBasics::ProcessBody(INT64 nTime, int nBodyCount, IBody** ppBodies)
{
    if (m_hWnd)
    {
        HRESULT hr = EnsureDirect2DResources();

        DetectionResult nEngaged[6] = { DetectionResult_Unknown };

        int width = 0;
        int height = 0;
        if (SUCCEEDED(hr) && m_pRenderTarget && m_pCoordinateMapper)
        {
            m_pRenderTarget->BeginDraw();
            m_pRenderTarget->Clear();

            RECT rct;
            GetClientRect(GetDlgItem(m_hWnd, IDC_VIDEOVIEW), &rct);
            width = rct.right;
            height = rct.bottom;

            for (int i = 0; i < nBodyCount; ++i)
            {
                nEngaged[i] = DetectionResult_Maybe;

                IBody* pBody = ppBodies[i];
                if (pBody)
                {
                    BOOLEAN bTracked = false;
                    hr = pBody->get_IsTracked(&bTracked);

                    // get_Engaged() appears to be usable; it presumably detects a person
                    // entering the field of view.
                    hr = pBody->get_Engaged( &nEngaged[i] );
                    // The following does not appear to be usable yet:
                    //hr = pBody->GetAppearanceDetectionResults((UINT)i, &nEngaged[i]);

                    if (SUCCEEDED(hr) && bTracked)
                    {
                        Joint joints[JointType_Count];
                        D2D1_POINT_2F jointPoints[JointType_Count];
                        HandState leftHandState = HandState_Unknown;
                        HandState rightHandState = HandState_Unknown;

                        pBody->get_HandLeftState(&leftHandState);
                        pBody->get_HandRightState(&rightHandState);

                        hr = pBody->GetJoints(_countof(joints), joints);
                        if (SUCCEEDED(hr))
                        {
                            for (int j = 0; j < _countof(joints); ++j)
                            {
                                jointPoints[j] = BodyToScreen(joints[j].Position, width, height);
                            }

                            DrawBody(joints, jointPoints);

                            // draw a circle on the head and show the body number
                            DrawHead(jointPoints[JointType_Head], i, nEngaged[i]);

                            DrawHand(leftHandState, jointPoints[JointType_HandLeft]);
                            DrawHand(rightHandState, jointPoints[JointType_HandRight]);
                        }
                    }
                }
            }

            hr = m_pRenderTarget->EndDraw();

            // Device lost, need to recreate the render target
            // We'll dispose it now and retry drawing
            if (D2DERR_RECREATE_TARGET == hr)
            {
                hr = S_OK;
                DiscardDirect2DResources();
            }
        }

        if (!m_nStartTime)
        {
            m_nStartTime = nTime;
        }

        double fps = 0.0;

        LARGE_INTEGER qpcNow = {0};
        if (m_fFreq)
        {
            if (QueryPerformanceCounter(&qpcNow))
            {
                if (m_nLastCounter)
                {
                    m_nFramesSinceUpdate++;
                    fps = m_fFreq * m_nFramesSinceUpdate / double(qpcNow.QuadPart - m_nLastCounter);
                }
            }
        }

        WCHAR szStatusMessage[128] ;
        StringCchPrintf( szStatusMessage, _countof(szStatusMessage),
                         L" FPS = %0.2f  Time = %I64d width:%d height:%d",
                         fps, (nTime - m_nStartTime), width, height);

        if (SetStatusMessage(szStatusMessage, 1000, false))
        {
            m_nLastCounter = qpcNow.QuadPart;
            m_nFramesSinceUpdate = 0;
        }
    }
}
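
// Note: BodyToScreen() is called above but not defined in these snippets. A
// sketch along the lines of the Microsoft BodyBasics sample, assuming the
// sample's 512x424 depth frame dimensions:
D2D1_POINT_2F CBodyBasics::BodyToScreen(const CameraSpacePoint& bodyPoint, int width, int height)
{
    // project the camera-space joint into depth space...
    DepthSpacePoint depthPoint = {0};
    m_pCoordinateMapper->MapCameraPointToDepthSpace(bodyPoint, &depthPoint);

    // ...then scale the depth-space coordinates to the render-target size
    float screenPointX = static_cast<float>(depthPoint.X * width) / 512;   // assumed depth width
    float screenPointY = static_cast<float>(depthPoint.Y * height) / 424;  // assumed depth height

    return D2D1::Point2F(screenPointX, screenPointY);
}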
Exemple #24
0
/// <summary>
/// Handle new body data
/// <param name="nTime">timestamp of frame</param>
/// <param name="nBodyCount">body data count</param>
/// <param name="ppBodies">body data in frame</param>
/// </summary>
void CColorBasics::ProcessBody(INT64 nTime, int nBodyCount, IBody** ppBodies)
{
	if (m_hWnd)
	{
		HRESULT hr = S_OK;

		D2D1_POINT_2F start;
		start.x = 1500.0;
		start.y = 800.0;

		D2D1_POINT_2F quit;
		quit.x = 300.0;
		quit.y = 800.0;

		//int width = 0;
		//int height = 0;
		if (SUCCEEDED(hr) && m_pCoordinateMapper)
		{
			// Commented out because ProcessColor(), which runs earlier, already does this
			//hr = m_pDrawColor->BeginDraw();

			DetectionResult nEngaged[6] = { DetectionResult_Unknown };
			PointF lean;

			//RECT rct;
			//GetClientRect(GetDlgItem(m_hWnd, IDC_VIDEOVIEW), &rct);
			//width = rct.right;
			//height = rct.bottom;

			UINT64 nTrackBody = 10;

			for (int i = 0; i < nBodyCount; ++i)
			{
				IBody* pBody = ppBodies[i];
				if (pBody)
				{
					// Get the index assuming a two-player flag-semaphore match mode.
					// Ideally the two competitors would be fixed before the game starts.
					//
					// Make sure the body really is being tracked.
					BOOLEAN bTracked = false;
					hr = pBody->get_IsTracked(&bTracked);

					// get_Engaged() appears to be usable; it presumably detects a person
					// entering the field of view.
					hr = pBody->get_Engaged(&nEngaged[i]);
					pBody->get_Lean(&lean);
					// The following does not appear to be usable yet:
					//hr = pBody->GetAppearanceDetectionResults((UINT)i, &nEngaged[i]);

					if (SUCCEEDED(hr) && bTracked)
					{
						// Note: when tracking is invalid, the index returned is 0, so be careful how it is used!!
						UINT64 nBodyIndex = 0;
						hr = pBody->get_TrackingId(&nBodyIndex);

						Joint joints[JointType_Count];
						D2D1_POINT_2F jointPoints[JointType_Count];
						HandState leftHandState = HandState_Unknown;
						HandState rightHandState = HandState_Unknown;

						pBody->get_HandLeftState(&leftHandState);
						pBody->get_HandRightState(&rightHandState);

						hr = pBody->GetJoints(_countof(joints), joints);
						if (SUCCEEDED(hr))
						{
							// convert to screen coordinates
							for (int j = 0; j < _countof(joints); ++j)
							{
								jointPoints[j] = BodyToScreen(joints[j].Position);
							}
							// draw a circle on the head and show the body number
							m_pDrawColor->DrawHead(jointPoints[JointType_Head], i, nEngaged[i], lean);

							// Runs when a hand tip enters a given region - like a button.
							// Currently this fires for anyone recognized; it really should be
							// limited to the first person recognized.
							float xy[2] = { 0.0 };

							if (!m_bSemaphore)
							{
								if (m_pSemaphore[0])
								{
									delete m_pSemaphore[0];
									m_pSemaphore[0] = NULL;
								}
								if (m_pSemaphore[1])
								{
									delete m_pSemaphore[1];
									m_pSemaphore[1] = NULL;
								}
								m_nButton = 1;
								xy[0] = jointPoints[JointType_HandTipRight].x - start.x;
								xy[1] = jointPoints[JointType_HandTipRight].y - start.y;
								if (sqrt( xy[0]*xy[0] + xy[1]*xy[1] ) < 100.0 )
								{
									if (nTrackBody == 10 || nTrackBody == nBodyIndex)
									{
										m_nButton = 0;
										nTrackBody = nBodyIndex;
									}
								}
							}
							else
							{
								// flag-semaphore start
								// semaphore detection
								if (m_pSemaphore[0] == NULL)
								{
									m_pSemaphore[0] = new Semaphore( &nBodyIndex );
								}
								else
								{
									if (m_pSemaphore[1] == NULL && !m_pSemaphore[0]->ItsMe(&nBodyIndex))
									{
										m_pSemaphore[1] = new Semaphore(&nBodyIndex);
									}
								}

								// Counting
								// Capture data in the basic pose
								// Main semaphore processing
								// Semaphore detection probably does not need frames at the image rate:
								// acquire body frames on a timer and run the detection on those.
								if (m_pSemaphore[0])
								{
									m_pSemaphore[0]->SetSignalType(&nBodyIndex, jointPoints, m_pDrawColor);
								}
								if (m_pSemaphore[1])
								{
									m_pSemaphore[1]->SetSignalType(&nBodyIndex, jointPoints, m_pDrawColor);
								}
								//m_pSemaphore[0]->Practice(nTime, jointPoints, m_pDrawColor);

								// quit button handling
								m_nButton = 2;
								// display for the basic pose
								xy[0] = jointPoints[JointType_HandTipLeft].x - quit.x;
								xy[1] = jointPoints[JointType_HandTipLeft].y - quit.y;
								if (sqrt( xy[0]*xy[0] + xy[1]*xy[1] ) < 100.0 )
								{
									if (nTrackBody == 10 || nTrackBody == nBodyIndex)
									{
										m_nButton = 0;
										nTrackBody = nBodyIndex;
									}
								}
							}
							m_pDrawColor->DrawBody(joints, jointPoints);
							//m_pDrawColor->DrawHand(leftHandState, jointPoints[JointType_HandLeft]);
							//m_pDrawColor->DrawHand(rightHandState, jointPoints[JointType_HandRight]);

							Detect(pBody);
							//break;
						}
					}
				}
			}
			if (!m_bSemaphore)
			{
				// this button handler sends a message to the window
				m_pDrawColor->DrawButton(start, m_nButton);
			}
			else
			{
				m_pDrawColor->DrawButton(quit, m_nButton);
			}
			// show the challenge for the two-player match mode
			if (Question(nTime))
			{
				m_pDrawColor->DrawButton(quit, 0);
			}

			m_pDrawColor->EndDraw();
		}
	}
}