Example #1
void DepthStream::getColorSpacePoints(ColorSpacePoint *colorSpacePointsFromDepth)
{
    if (lock()) {
        // Map every depth-frame pixel to its corresponding color-space coordinate
        const UINT depthPointCount = DEPTH_WIDTH * DEPTH_HEIGHT;
        ICoordinateMapper *coordinateMapper = nullptr;
        HRESULT hr = m_Device->get().kinect2->get_CoordinateMapper(&coordinateMapper);

        if (SUCCEEDED(hr)) {
            coordinateMapper->MapDepthFrameToColorSpace(depthPointCount, (UINT16 *)m_Frame.data, depthPointCount, colorSpacePointsFromDepth);
        }

        safeRelease(coordinateMapper);
        unlock();
    }
}
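These snippets call release helpers (safeRelease here, SafeRelease in the later examples) whose definitions are not shown. A minimal sketch, assuming the usual release-and-null pattern from the Kinect SDK samples:

// Minimal sketch of the assumed COM release helper: release the interface
// pointer if it is non-null and clear it so it cannot be released twice.
template<class Interface>
inline void SafeRelease(Interface*& pInterfaceToRelease)
{
	if (pInterfaceToRelease != nullptr) {
		pInterfaceToRelease->Release();
		pInterfaceToRelease = nullptr;
	}
}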
Example #2
int _tmain( int argc, _TCHAR* argv[] )
{
	cv::setUseOptimized( true );

	// Sensor
	IKinectSensor* pSensor;
	HRESULT hResult = S_OK;
	hResult = GetDefaultKinectSensor( &pSensor );
	if( FAILED( hResult ) ){
		std::cerr << "Error : GetDefaultKinectSensor" << std::endl;
		return -1;
	}

	hResult = pSensor->Open( );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IKinectSensor::Open()" << std::endl;
		return -1;
	}

	// Source
	IColorFrameSource* pColorSource;
	hResult = pSensor->get_ColorFrameSource( &pColorSource );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IKinectSensor::get_ColorFrameSource()" << std::endl;
		return -1;
	}

	IBodyFrameSource* pBodySource;
	hResult = pSensor->get_BodyFrameSource( &pBodySource );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IKinectSensor::get_BodyFrameSource()" << std::endl;
		return -1;
	}

	// Reader
	IColorFrameReader* pColorReader;
	hResult = pColorSource->OpenReader( &pColorReader );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IColorFrameSource::OpenReader()" << std::endl;
		return -1;
	}

	IBodyFrameReader* pBodyReader;
	hResult = pBodySource->OpenReader( &pBodyReader );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IBodyFrameSource::OpenReader()" << std::endl;
		return -1;
	}

	// Description
	IFrameDescription* pDescription;
	hResult = pColorSource->get_FrameDescription( &pDescription );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IColorFrameSource::get_FrameDescription()" << std::endl;
		return -1;
	}

	int width = 0;
	int height = 0;
	pDescription->get_Width( &width ); // 1920
	pDescription->get_Height( &height ); // 1080
	unsigned int bufferSize = width * height * 4 * sizeof( unsigned char );

	cv::Mat bufferMat( height, width, CV_8UC4 );
	cv::Mat bodyMat( height / 2, width / 2, CV_8UC4 );
	cv::namedWindow( "Body" );

	// Color Table
	cv::Vec3b color[BODY_COUNT];
	color[0] = cv::Vec3b( 255,   0,   0 );
	color[1] = cv::Vec3b(   0, 255,   0 );
	color[2] = cv::Vec3b(   0,   0, 255 );
	color[3] = cv::Vec3b( 255, 255,   0 );
	color[4] = cv::Vec3b( 255,   0, 255 );
	color[5] = cv::Vec3b(   0, 255, 255 );

	// Coordinate Mapper
	ICoordinateMapper* pCoordinateMapper;
	hResult = pSensor->get_CoordinateMapper( &pCoordinateMapper );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IKinectSensor::get_CoordinateMapper()" << std::endl;
		return -1;
	}

	while( 1 ){
		// Frame
		IColorFrame* pColorFrame = nullptr;
		hResult = pColorReader->AcquireLatestFrame( &pColorFrame );
		if( SUCCEEDED( hResult ) ){
			hResult = pColorFrame->CopyConvertedFrameDataToArray( bufferSize, reinterpret_cast<BYTE*>( bufferMat.data ), ColorImageFormat::ColorImageFormat_Bgra );
			if( SUCCEEDED( hResult ) ){
				cv::resize( bufferMat, bodyMat, cv::Size(), 0.5, 0.5 );
			}
		}
		//SafeRelease( pColorFrame );

		IBodyFrame* pBodyFrame = nullptr;
		hResult = pBodyReader->AcquireLatestFrame( &pBodyFrame );
		if( SUCCEEDED( hResult ) ){
			IBody* pBody[BODY_COUNT] = { 0 };
			hResult = pBodyFrame->GetAndRefreshBodyData( BODY_COUNT, pBody );
			if( SUCCEEDED( hResult ) ){
				for( int count = 0; count < BODY_COUNT; count++ ){
					BOOLEAN bTracked = false;
					hResult = pBody[count]->get_IsTracked( &bTracked );
					if( SUCCEEDED( hResult ) && bTracked ){
						Joint joint[JointType::JointType_Count];
						hResult = pBody[ count ]->GetJoints( JointType::JointType_Count, joint );
						if( SUCCEEDED( hResult ) ){
							// Left Hand State
							HandState leftHandState = HandState::HandState_Unknown;
							hResult = pBody[count]->get_HandLeftState( &leftHandState );
							if( SUCCEEDED( hResult ) ){
								ColorSpacePoint colorSpacePoint = { 0 };
								hResult = pCoordinateMapper->MapCameraPointToColorSpace( joint[JointType::JointType_HandLeft].Position, &colorSpacePoint );
								if( SUCCEEDED( hResult ) ){
									int x = static_cast<int>( colorSpacePoint.X );
									int y = static_cast<int>( colorSpacePoint.Y );
									if( ( x >= 0 ) && ( x < width ) && ( y >= 0 ) && ( y < height ) ){
										if( leftHandState == HandState::HandState_Open ){
											cv::circle( bufferMat, cv::Point( x, y ), 75, cv::Scalar( 0, 128, 0 ), 5, CV_AA );
										}
										else if( leftHandState == HandState::HandState_Closed ){
											cv::circle( bufferMat, cv::Point( x, y ), 75, cv::Scalar( 0, 0, 128 ), 5, CV_AA );
										}
										else if( leftHandState == HandState::HandState_Lasso ){
											cv::circle( bufferMat, cv::Point( x, y ), 75, cv::Scalar( 128, 128, 0 ), 5, CV_AA );
										}
									}
								}
							}

							// Right Hand State
							HandState rightHandState = HandState::HandState_Unknown;
							hResult = pBody[count]->get_HandRightState( &rightHandState );
							if( SUCCEEDED( hResult ) ){
								ColorSpacePoint colorSpacePoint = { 0 };
								hResult = pCoordinateMapper->MapCameraPointToColorSpace( joint[JointType::JointType_HandRight].Position, &colorSpacePoint );
								if( SUCCEEDED( hResult ) ){
									int x = static_cast<int>( colorSpacePoint.X );
									int y = static_cast<int>( colorSpacePoint.Y );
									if( ( x >= 0 ) && ( x < width ) && ( y >= 0 ) && ( y < height ) ){
										if( rightHandState == HandState::HandState_Open ){
											cv::circle( bufferMat, cv::Point( x, y ), 75, cv::Scalar( 0, 128, 0 ), 5, CV_AA );
										}
										else if( rightHandState == HandState::HandState_Closed ){
											cv::circle( bufferMat, cv::Point( x, y ), 75, cv::Scalar( 0, 0, 128 ), 5, CV_AA );
										}
										else if( rightHandState == HandState::HandState_Lasso ){
											cv::circle( bufferMat, cv::Point( x, y ), 75, cv::Scalar( 128, 128, 0 ), 5, CV_AA );
										}
									}
								}
							}

							// Joint
							for( int type = 0; type < JointType::JointType_Count; type++ ){
								ColorSpacePoint colorSpacePoint = { 0 };
								pCoordinateMapper->MapCameraPointToColorSpace( joint[type].Position, &colorSpacePoint );
								int x = static_cast<int>( colorSpacePoint.X );
								int y = static_cast<int>( colorSpacePoint.Y );
								if( ( x >= 0 ) && ( x < width ) && ( y >= 0 ) && ( y < height ) ){
									cv::circle( bufferMat, cv::Point( x, y ), 5, static_cast< cv::Scalar >( color[count] ), -1, CV_AA );
								}
							}
						}

						/*// Activity
						UINT capacity = 0;
						DetectionResult detectionResults = DetectionResult::DetectionResult_Unknown;
						hResult = pBody[count]->GetActivityDetectionResults( capacity, &detectionResults );
						if( SUCCEEDED( hResult ) ){
							if( detectionResults == DetectionResult::DetectionResult_Yes ){
								switch( capacity ){
									case Activity::Activity_EyeLeftClosed:
										std::cout << "Activity_EyeLeftClosed" << std::endl;
										break;
									case Activity::Activity_EyeRightClosed:
										std::cout << "Activity_EyeRightClosed" << std::endl;
										break;
									case Activity::Activity_MouthOpen:
										std::cout << "Activity_MouthOpen" << std::endl;
										break;
									case Activity::Activity_MouthMoved:
										std::cout << "Activity_MouthMoved" << std::endl;
										break;
									case Activity::Activity_LookingAway:
										std::cout << "Activity_LookingAway" << std::endl;
										break;
									default:
										break;
								}
							}
						}
						else{
							std::cerr << "Error : IBody::GetActivityDetectionResults()" << std::endl;
						}*/

						/*// Appearance
						capacity = 0;
						detectionResults = DetectionResult::DetectionResult_Unknown;
						hResult = pBody[count]->GetAppearanceDetectionResults( capacity, &detectionResults );
						if( SUCCEEDED( hResult ) ){
							if( detectionResults == DetectionResult::DetectionResult_Yes ){
								switch( capacity ){
									case Appearance::Appearance_WearingGlasses:
										std::cout << "Appearance_WearingGlasses" << std::endl;
										break;
									default:
										break;
								}
							}
						}
						else{
							std::cerr << "Error : IBody::GetAppearanceDetectionResults()" << std::endl;
						}*/

						/*// Expression
						capacity = 0;
						detectionResults = DetectionResult::DetectionResult_Unknown;
						hResult = pBody[count]->GetExpressionDetectionResults( capacity, &detectionResults );
						if( SUCCEEDED( hResult ) ){
							if( detectionResults == DetectionResult::DetectionResult_Yes ){
								switch( capacity ){
									case Expression::Expression_Happy:
										std::cout << "Expression_Happy" << std::endl;
										break;
									case Expression::Expression_Neutral:
										std::cout << "Expression_Neutral" << std::endl;
										break;
									default:
										break;
								}
							}
						}
						else{
							std::cerr << "Error : IBody::GetExpressionDetectionResults()" << std::endl;
						}*/

						// Lean
						PointF amount;
						hResult = pBody[count]->get_Lean( &amount );
						if( SUCCEEDED( hResult ) ){
							std::cout << "amount : " << amount.X << ", " << amount.Y << std::endl;
						}
					}
				}
				cv::resize( bufferMat, bodyMat, cv::Size(), 0.5, 0.5 );
			}
			for( int count = 0; count < BODY_COUNT; count++ ){
				SafeRelease( pBody[count] );
			}
		}
		//SafeRelease( pBodyFrame );

		SafeRelease( pColorFrame );
		SafeRelease( pBodyFrame );

		cv::imshow( "Body", bodyMat );

		if( cv::waitKey( 10 ) == VK_ESCAPE ){
			break;
		}
	}

	SafeRelease( pColorSource );
	SafeRelease( pBodySource );
	SafeRelease( pColorReader );
	SafeRelease( pBodyReader );
	SafeRelease( pDescription );
	SafeRelease( pCoordinateMapper );
	if( pSensor ){
		pSensor->Close();
	}
	SafeRelease( pSensor );
	cv::destroyAllWindows();

	return 0;
}
Example #3
int main()
{

	// name and position windows
	cvNamedWindow("Color Probabilistic Tracking - Samples", 1);
	cvMoveWindow("Color Probabilistic Tracking - Samples", 0, 0);
	cvNamedWindow("Color Probabilistic Tracking - Result", 1);
	cvMoveWindow("Color Probabilistic Tracking - Result", 1000, 0);

	//control mouse
	setMouseCallback("Color Probabilistic Tracking - Samples", onMouse, 0);

	cv::setUseOptimized(true);

	// Sensor
	IKinectSensor* pSensor;
	HRESULT hResult = S_OK;
	hResult = GetDefaultKinectSensor(&pSensor);
	if (FAILED(hResult)) {
		std::cerr << "Error : GetDefaultKinectSensor" << std::endl;
		return -1;
	}

	hResult = pSensor->Open();
	if (FAILED(hResult)) {
		std::cerr << "Error : IKinectSensor::Open()" << std::endl;
		return -1;
	}

	// Source
	IColorFrameSource* pColorSource;
	hResult = pSensor->get_ColorFrameSource(&pColorSource);
	if (FAILED(hResult)) {
		std::cerr << "Error : IKinectSensor::get_ColorFrameSource()" << std::endl;
		return -1;
	}

	IDepthFrameSource* pDepthSource;
	hResult = pSensor->get_DepthFrameSource(&pDepthSource);
	if (FAILED(hResult)) {
		std::cerr << "Error : IKinectSensor::get_DepthFrameSource()" << std::endl;
		return -1;
	}

	/*IBodyIndexFrameSource* pBodyIndexSource;
	hResult = pSensor->get_BodyIndexFrameSource(&pBodyIndexSource);*/

	// Reader
	IColorFrameReader* pColorReader;
	hResult = pColorSource->OpenReader(&pColorReader);
	if (FAILED(hResult)) {
		std::cerr << "Error : IColorFrameSource::OpenReader()" << std::endl;
		return -1;
	}

	IDepthFrameReader* pDepthReader;
	hResult = pDepthSource->OpenReader(&pDepthReader);
	if (FAILED(hResult)) {
		std::cerr << "Error : IDepthFrameSource::OpenReader()" << std::endl;
		return -1;
	}

	//IBodyIndexFrameReader* pBodyIndexReader; // still needs SafeRelease
	//hResult = pBodyIndexSource->OpenReader(&pBodyIndexReader);

	// Description
	IFrameDescription* pColorDescription;
	hResult = pColorSource->get_FrameDescription(&pColorDescription);
	if (FAILED(hResult)) {
		std::cerr << "Error : IColorFrameSource::get_FrameDescription()" << std::endl;
		return -1;
	}

	int colorWidth = 0;
	int colorHeight = 0;
	pColorDescription->get_Width(&colorWidth); // 1920
	pColorDescription->get_Height(&colorHeight); // 1080
	unsigned int colorBufferSize = colorWidth * colorHeight * 4 * sizeof(unsigned char);

	cv::Mat colorBufferMat(colorHeight, colorWidth, CV_8UC4);
	cv::Mat colorMat(colorHeight / 2, colorWidth / 2, CV_8UC4);
	cv::namedWindow("Color");

	RGBQUAD* m_pDepthRGBX;
	m_pDepthRGBX = new RGBQUAD[512 * 424];// create heap storage for color pixel data in RGBX format

	IFrameDescription* pDepthDescription;
	hResult = pDepthSource->get_FrameDescription(&pDepthDescription);
	if (FAILED(hResult)) {
		std::cerr << "Error : IDepthFrameSource::get_FrameDescription()" << std::endl;
		return -1;
	}

	int depthWidth = 0;
	int depthHeight = 0;
	pDepthDescription->get_Width(&depthWidth); // 512
	pDepthDescription->get_Height(&depthHeight); // 424
	unsigned int depthBufferSize = depthWidth * depthHeight * sizeof(unsigned short);

	cv::Mat depthBufferMat(depthHeight, depthWidth, CV_16UC1);
	UINT16* pDepthBuffer = nullptr;
	cv::Mat depthMat(depthHeight, depthWidth, CV_8UC1);
	cv::namedWindow("Depth");

	//UINT32 nBodyIndexSize = 0;
	//BYTE* pIndexBuffer = nullptr; // this hasn't been SafeReleased yet

	// Coordinate Mapper
	ICoordinateMapper* pCoordinateMapper;
	hResult = pSensor->get_CoordinateMapper(&pCoordinateMapper);
	if (FAILED(hResult)) {
		std::cerr << "Error : IKinectSensor::get_CoordinateMapper()" << std::endl;
		return -1;
	}

	cv::Mat coordinateMapperMat(depthHeight, depthWidth, CV_8UC4);
	cv::namedWindow("CoordinateMapper");

	unsigned short minDepth, maxDepth;
	pDepthSource->get_DepthMinReliableDistance(&minDepth);
	pDepthSource->get_DepthMaxReliableDistance(&maxDepth);




	while (1) {

		double t = (double)getTickCount();


		// Color Frame
		IColorFrame* pColorFrame = nullptr;
		hResult = pColorReader->AcquireLatestFrame(&pColorFrame);
		if (SUCCEEDED(hResult)) {
			hResult = pColorFrame->CopyConvertedFrameDataToArray(colorBufferSize, reinterpret_cast<BYTE*>(colorBufferMat.data), ColorImageFormat::ColorImageFormat_Bgra);
			if (SUCCEEDED(hResult)) {
				cv::resize(colorBufferMat, colorMat, cv::Size(), 0.5, 0.5);
			}
		}
		//SafeRelease( pColorFrame );

		// Depth Frame
		IDepthFrame* pDepthFrame = nullptr;
		hResult = pDepthReader->AcquireLatestFrame(&pDepthFrame);

		if (SUCCEEDED(hResult)) {
			// Point depthBufferMat.data at the frame's underlying buffer
			// (valid only until pDepthFrame is released below)
			hResult = pDepthFrame->AccessUnderlyingBuffer(&depthBufferSize, reinterpret_cast<UINT16**>(&depthBufferMat.data));
		}

		if (SUCCEEDED(hResult)) {
			hResult = pDepthFrame->AccessUnderlyingBuffer(&depthBufferSize, &pDepthBuffer);

			if (SUCCEEDED(hResult))
			{
				RGBQUAD* pRGBX = m_pDepthRGBX;

				// end pixel is start + width*height - 1
				const UINT16* pBufferEnd = pDepthBuffer + (512 * 424);
				int index = 0;

				while (pDepthBuffer < pBufferEnd)
				{
					USHORT depth = *pDepthBuffer;

					BYTE intensity = static_cast<BYTE>((depth >= 50) && (depth <= 5000) ? (depth % 256) : 0);

					pRGBX->rgbRed = intensity;
					pRGBX->rgbGreen = intensity;
					pRGBX->rgbBlue = intensity;

					depthData[index] = depth;

					++index;
					++pRGBX;
					++pDepthBuffer;
				}
			}
		}

		Mat DepthImage(424, 512, CV_8UC4, m_pDepthRGBX);
		//SafeRelease( pDepthFrame );

		// Mapping (Depth to Color)
		if (SUCCEEDED(hResult)) {
			std::vector<ColorSpacePoint> colorSpacePoints(depthWidth * depthHeight);
			hResult = pCoordinateMapper->MapDepthFrameToColorSpace(depthWidth * depthHeight, reinterpret_cast<UINT16*>(depthBufferMat.data), depthWidth * depthHeight, &colorSpacePoints[0]);
			if (SUCCEEDED(hResult)) {
				coordinateMapperMat = cv::Scalar(0, 0, 0, 0);
				for (int y = 0; y < depthHeight; y++) {
					for (int x = 0; x < depthWidth; x++) {
						unsigned int index = y * depthWidth + x;
						ColorSpacePoint point = colorSpacePoints[index];
						int colorX = static_cast<int>(std::floor(point.X + 0.5));
						int colorY = static_cast<int>(std::floor(point.Y + 0.5));
						unsigned short depth = depthBufferMat.at<unsigned short>(y, x);
						if ((colorX >= 0) && (colorX < colorWidth) && (colorY >= 0) && (colorY < colorHeight)/* && ( depth >= minDepth ) && ( depth <= maxDepth )*/) {
							coordinateMapperMat.at<cv::Vec4b>(y, x) = colorBufferMat.at<cv::Vec4b>(colorY, colorX);
						}
					}
				}
			}
		}

		if (SUCCEEDED(hResult))
		{

			//Particle Filter Start from here
			frame = &(IplImage)coordinateMapperMat; // wrap the Mat as an IplImage (OpenCV 2.x C-API interop)

			if (image == 0)
			{
				// initialize variables and allocate buffers 
				image = cvCreateImage(cvGetSize(frame), 8, 4);//every pixel has 4 channels
				image->origin = frame->origin;

				result = cvCreateImage(cvGetSize(frame), 8, 4);
				result->origin = frame->origin;

				hsv = cvCreateImage(cvGetSize(frame), 8, 3);

				hue = cvCreateImage(cvGetSize(frame), 8, 1);

				sat = cvCreateImage(cvGetSize(frame), 8, 1);

				histimg_ref = cvCreateImage(cvGetSize(frame), 8, 3);
				histimg_ref->origin = frame->origin;
				cvZero(histimg_ref);

				histimg = cvCreateImage(cvGetSize(frame), 8, 3);
				histimg->origin = frame->origin;
				cvZero(histimg);

				bin_w = histimg_ref->width / BIN;
				bin_h = histimg_ref->height / BIN;


				data1.sample_t = reinterpret_cast<Region *> (malloc(sizeof(Region)* SAMPLE));
				data1.sample_t_1 = reinterpret_cast<Region *> (malloc(sizeof(Region)* SAMPLE));
				data1.sample_weight = reinterpret_cast<double *> (malloc(sizeof(double)* SAMPLE));
				data1.accum_weight = reinterpret_cast<double *> (malloc(sizeof(double)* SAMPLE));

			}

			cvCopy(frame, image);
			cvCopy(frame, result);
			cvCvtColor(image, hsv, CV_BGR2HSV);//image ~ hsv


			if (tracking)
			{
				//v_max = 0.0;

				cvSplit(hsv, hue, 0, 0, 0);//hsv->hue
				cvSplit(hsv, 0, 0, sat, 0);//hsv-saturation

				if (selecting)
				{

					// get the selected target area
					//ref_v_max = 0.0;
					area.width = abs(P_org.x - P_end.x);
					area.height = abs(P_org.y - P_end.y);
					area.x = MIN(P_org.x, P_end.x);
					area.y = MIN(P_org.y, P_end.y);

					cvZero(histimg_ref);

					// build reference histogram
					cvSetImageROI(hue, area);
					cvSetImageROI(sat, area);

					// zero reference histogram
					for (i = 0; i < BIN; i++)
						for (j = 0; j < BIN; j++)
							hist_ref[i][j] = 0.0;

					// calculate reference histogram
					for (i = 0; i < area.height; i++)
					{
						for (j = 0; j < area.width; j++)
						{
							im_hue = cvGet2D(hue, i, j);
							im_sat = cvGet2D(sat, i, j);
							k = int(im_hue.val[0] / STEP_HUE);
							h = int(im_sat.val[0] / STEP_SAT);
							hist_ref[k][h] = hist_ref[k][h] + 1.0;
						}
					}


					// normalize each bin of the reference histogram by the area of the selection
					for (i = 0; i < BIN; i++)
					{
						for (j = 0; j < BIN; j++)
						{
							hist_ref[i][j] = hist_ref[i][j] / (area.height*area.width);
						}
					}

					cvResetImageROI(hue);
					cvResetImageROI(sat);

					// initialize tracking and samples
					track_win = area;
					Initdata(track_win);
					track_win_last = track_win;

					// set up flag of tracking
					selecting = 0;

				}

				// sample propagation and weighting
				track_win = ImProcess(hue, sat, hist_ref, track_win_last);
				FrameNumber++;

				track_win_last = track_win;
				cvZero(histimg);

				// draw the one RED bounding box
				cvRectangle(image, cvPoint(track_win.x, track_win.y), cvPoint(track_win.x + track_win.width, track_win.y + track_win.height), CV_RGB(255, 0, 0), 2);
				printf("width = %d, height = %d\n", track_win.width, track_win.height);

				// save snapshots of the first eight sampled frames
				if (FrameNumber % 10 == 0 && FrameNumber / 10 >= 1 && FrameNumber / 10 <= 8)
				{
					char filename[64];
					sprintf(filename, "./imageout%d.jpg", FrameNumber / 10);
					cvSaveImage(filename, image);

					sprintf(filename, "./resultout%d.jpg", FrameNumber / 10);
					cvSaveImage(filename, result);
				}

				// draw the same bounding box on DepthImage
				rectangle(DepthImage, track_win, CV_RGB(255, 0, 0), 2);

				//******************************************************Geodesic Distance***************************************************************************************
					//Point propagation and weight
					if (PointTrack == 1)
					{

						if (PointSelect == 1)//only visit once
						{

							// initialize tracking and samples
							for (int i = 0; i < SAMPLE; i++)
							{
								point[i].x_1 = P_track.x;
								point[i].y_1 = P_track.y;
								point[i].z_1 = depthData[P_track.x + P_track.y * 512];
								point[i].x_1_prime = 0.0;
								point[i].y_1_prime = 0.0;
							}
							
							refeFlag = 1;

							p_win = P_track;

							//p_transtart is the start point of the surface mesh
							P_transtart.x = track_win.x;
							P_transtart.y = track_win.y;

							PointSelect = 0;

						}

						//construct the graph(mesh)
						ConstructMesh(depthData, adjlist, P_transtart,track_win.width,track_win.height);

						//calculate shortest path
						vector<int> vertexDist;
						vertexDist.resize(track_win.width*track_win.height);

						ShortestPath(P_extre, adjlist,  vertexDist);

						cvCircle(image, P_extre, 3, CV_RGB(0, 255, 0),1);

						// generate the reference distance used for comparison
						if (refeFlag > 0)
						{
							cvCircle(image, p_win, 3, CV_RGB(0, 0, 255), 1);

							int track = abs(P_transtart.x - P_track.x) + track_win.width * abs(P_transtart.y - P_track.y);

							referDistance = vertexDist[track];

							refeFlag = 0;
						}
						
						//samples propagation
						PredictPoint(p_win);

						// get the geodesic distance for each sample and
						// find the sample whose distance is most similar to the reference distance
						float Dist, AbsDist, WinDist, minAbsDist = 10000;
						int number,sum=0,count=0;

						for (int i = 0; i < SAMPLE; i++)
						{
							int t = abs(P_transtart.x - point[i].x) + track_win.width * abs(P_transtart.y - point[i].y);
							if (adjlist[t].v == false) { count++; continue; }

							int refer = abs(point[i].x - P_transtart.x) + track_win.width * abs(point[i].y - P_transtart.y);
							Dist = vertexDist[refer];

							AbsDist = fabs(referDistance - Dist);

							//point[i].SampleWeight = AbsDist;
							//point[i].AccumWeight = sum;
							//sum = sum + AbsDist;

							if (AbsDist < minAbsDist) { minAbsDist = AbsDist; number = i; WinDist = Dist; }

						}

						//for (int i = 0; i < SAMPLE; i++)
						//{
						//	point[i].SampleWeight = point[i].SampleWeight / sum;
						//	point[i].AccumWeight = point[i].AccumWeight / sum;
						//}

						printf("referDist = %f, winDist = %f, discardPoints = %d\n", referDistance, WinDist,count);
						
						p_win_last = p_win;
						p_win.x = point[number].x;
						p_win.y = point[number].y;

						//samples re-location
						float deltaX = p_win.x - p_win_last.x;
						float deltaY = p_win.y - p_win_last.y;

						UpdatePoint(number, deltaX, deltaY);

						cvCircle(image, p_win, 5, CV_RGB(0, 0, 0));
					}



				//	//****************************************************************************************************************************************
			}

			// if still selecting a target, show the RED selected area
			else cvRectangle(image, P_org, P_end, CV_RGB(255, 0, 0), 1);

		}

		imshow("Depth", DepthImage);
		cvShowImage("Color Probabilistic Tracking - Samples", image);
		cvShowImage("Color Probabilistic Tracking - Result", result);

		SafeRelease(pColorFrame);
		SafeRelease(pDepthFrame);
		//SafeRelease(pBodyIndexFrame);

		cv::imshow("Color", colorMat);
		cv::imshow("Depth", DepthImage);
		cv::imshow("CoordinateMapper", coordinateMapperMat);

		// stop the timer and convert the elapsed time to frames per second
		t = ((double)getTickCount() - t) / getTickFrequency();
		t = 1 / t;

		//cout << "FPS:" << t << "FrameNumber\n" << FrameNumebr<< endl;
		printf("FPS:%f Frame:%d \n\n", t, FrameNumber);


		if (cv::waitKey(30) == VK_ESCAPE) {
			break;
		}
	}

	SafeRelease(pColorSource);
	SafeRelease(pDepthSource);
	//SafeRelease(pBodyIndexSource);

	SafeRelease(pColorReader);
	SafeRelease(pDepthReader);
	//SafeRelease(pBodyIndexReader);

	SafeRelease(pColorDescription);
	SafeRelease(pDepthDescription);
	SafeRelease(pCoordinateMapper);
	if (pSensor) {
		pSensor->Close();
	}
	SafeRelease(pSensor);
	cv::destroyAllWindows();


	cvReleaseImage(&image);
	cvReleaseImage(&result);
	cvReleaseImage(&histimg_ref);
	cvReleaseImage(&histimg);
	cvReleaseImage(&hsv);
	cvReleaseImage(&hue);
	cvReleaseImage(&sat);
	cvDestroyWindow("Color Probabilistic Tracking - Samples");
	cvDestroyWindow("Color Probabilistic Tracking - Result");

	return 0;
}
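Example #3 depends on a number of globals, constants, and helper routines (the particle-filter functions Initdata, ImProcess, PredictPoint, UpdatePoint, the mesh routines ConstructMesh and ShortestPath, and the mouse handler registered with setMouseCallback) that are not part of the snippet. The sketch below shows, under stated assumptions, what the missing declarations and a click-and-drag selection callback might look like; the constant values and the callback logic are guesses, not the original code.

// Hypothetical declarations assumed by Example #3 (names taken from the snippet;
// values and types are guesses). The mesh/sample structures (Region, adjlist,
// point[], data1), STEP_HUE/STEP_SAT, and the particle-filter helpers are omitted.
#define BIN     10                     // hue/saturation histogram bins (assumed)
#define SAMPLE  300                    // number of particle-filter samples (assumed)

IplImage *frame = 0, *image = 0, *result = 0;
IplImage *hsv = 0, *hue = 0, *sat = 0, *histimg_ref = 0, *histimg = 0;
CvRect   area, track_win, track_win_last;
CvPoint  P_org, P_end, P_track, P_transtart, P_extre, p_win, p_win_last;
CvScalar im_hue, im_sat;
double   hist_ref[BIN][BIN];
unsigned short depthData[512 * 424];   // raw depth values copied each frame
int selecting = 0, tracking = 0, PointTrack = 0, PointSelect = 1, refeFlag = 0;
int bin_w, bin_h, i, j, k, h, FrameNumber = 0;
float referDistance = 0.0f;

// Sketch of the mouse callback: drag with the left button to select the target,
// release to rebuild the reference histogram and start tracking.
void onMouse(int event, int x, int y, int /*flags*/, void* /*userdata*/)
{
	switch (event) {
	case CV_EVENT_LBUTTONDOWN:
		P_org = cvPoint(x, y);
		P_end = cvPoint(x, y);
		tracking = 0;                  // pause tracking while selecting
		break;
	case CV_EVENT_MOUSEMOVE:
		P_end = cvPoint(x, y);
		break;
	case CV_EVENT_LBUTTONUP:
		P_end = cvPoint(x, y);
		selecting = 1;                 // main loop rebuilds the histogram
		tracking = 1;                  // resume tracking with the new target
		break;
	}
}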
Example #4
int main(int argc, char* argv[])
{


	int similarity = 0;

	string line;
	ifstream myfile ("source_config.txt");
	if (myfile.is_open())
	{
		getline (myfile,line);
		hauteurCamera = stoi(line);
		getline (myfile,line);
		fountainXPosition = stoi(line);
		getline (myfile,line);
		fountainYPosition = stoi(line);
		getline (myfile,line);
		fountainWidth = stoi(line);

		getline (myfile,line);
		blasterWidth = stoi(line);
		getline (myfile,line);
		int numberOfBlaster = stoi(line);

		for(int i = 0;i<numberOfBlaster;i++){
			getline (myfile,line);
			blasterXPosition.push_back(stoi(line));
			getline (myfile,line);
			blasterYPosition.push_back(stoi(line));
		}

		myfile.close();
	}

	else
	{
		cout << "Unable to open file"; 
		exit(-1);
	}

	IKinectSensor* m_pKinectSensor;
	IDepthFrameReader* pDepthReader;
	IDepthFrameSource* pDepthFrameSource = NULL; // Depth image

	IColorFrameReader* pColorReader;
	IColorFrameSource* pColorFrameSource = NULL;


	HRESULT hr;

	hr = GetDefaultKinectSensor(&m_pKinectSensor);
	if (FAILED(hr)){
		cout << "ColorTrackerv2 Error : GetDefaultKinectSensor failed." << endl;
		return false;
	}

	hr = m_pKinectSensor->Open();
	if (FAILED(hr)){
		cout << "ColorTrackerv2 Error : Open failed." << endl;
		return false;
	}

	hr = m_pKinectSensor->get_DepthFrameSource( &pDepthFrameSource );
	if (FAILED(hr)){
		cout << "ColorTrackerv2 Error : get_DepthFrameSource failed." << endl;
		return false;
	}
	hr = pDepthFrameSource->OpenReader( &pDepthReader );
	if (FAILED(hr)){
		cout << "ColorTrackerv2 Error : OpenReader failed." << endl;
		return false;
	}


	IFrameDescription* pDepthDescription;
	hr = pDepthFrameSource->get_FrameDescription( &pDepthDescription );
	if( FAILED( hr ) ){
		std::cerr << "Error : IDepthFrameSource::get_FrameDescription()" << std::endl;
		return -1;
	}
	w = 0,h = 0;
	pDepthDescription->get_Width( &w ); // 512
	pDepthDescription->get_Height( &h ); // 424
	//unsigned int irBufferSize = w * h * sizeof( unsigned short );


	hr = m_pKinectSensor->get_ColorFrameSource( &pColorFrameSource );
	if (FAILED(hr)){
		cout << "ColorTrackerv2 Error : get_ColorFrameSource failed." << endl;
		return false;
	}
	hr = pColorFrameSource->OpenReader( &pColorReader );
	if (FAILED(hr)){
		cout << "ColorTrackerv2 Error : OpenReader failed." << endl;
		return false;
	}

	// Description
	IFrameDescription* pColorDescription;
	hr = pColorFrameSource->get_FrameDescription( &pColorDescription );
	if( FAILED( hr ) ){
		std::cerr << "Error : IColorFrameSource::get_FrameDescription()" << std::endl;
		return -1;
	}
	int colorWidth = 0;
	int colorHeight = 0;
	pColorDescription->get_Width( &colorWidth ); // 1920
	pColorDescription->get_Height( &colorHeight ); // 1080
	unsigned int colorBufferSize = colorWidth * colorHeight * 4 * sizeof( unsigned char );

	Mat colorBufferMat = Mat::zeros( cvSize(colorWidth,colorHeight), CV_8UC4   );
	//colorMat = Mat::zeros( cvSize(colorWidth/2,colorHeight/2), CV_8UC4   );



	ICoordinateMapper* pCoordinateMapper;
	hr = m_pKinectSensor->get_CoordinateMapper( &pCoordinateMapper );



	//  open a opencv window
	cv::namedWindow("source_config", CV_WINDOW_AUTOSIZE );
	setMouseCallback("source_config", MouseCallBackFunc, NULL);

	Mat frame(h,w, CV_8UC3, Scalar(255,255,255));
	Mat display;
	//Mat img;

	char k = 0;
	while(k!=27){

		HRESULT hResult = S_OK;

		if(displayColor){
			IColorFrame* pColorFrame = nullptr;
			hResult = pColorReader->AcquireLatestFrame( &pColorFrame );
			while(!SUCCEEDED(hResult)){
				Sleep(50);
				hResult = pColorReader->AcquireLatestFrame(&pColorFrame);
			}

			if( SUCCEEDED( hResult ) ){
				hResult = pColorFrame->CopyConvertedFrameDataToArray( colorBufferSize, reinterpret_cast<BYTE*>( colorBufferMat.data ), ColorImageFormat::ColorImageFormat_Bgra );
				if( !SUCCEEDED( hResult ) ){
					return false;
				}

				resize(colorBufferMat,display,Size(displaySize*w,displaySize*h));

				flip(display,display,1);

				cv::line(display,Point(displaySize*w/2,0),Point(displaySize*w/2,displaySize*h),Scalar(0,0,255),2);
				cv::line(display,Point(0,displaySize*h/2),Point(displaySize*w,displaySize*h/2),Scalar(0,0,255),2);


				if (pColorFrame )
				{
					pColorFrame ->Release();
					pColorFrame  = NULL;
				}



			}
			else 
				return false;



		}
		else
		{

			IDepthFrame* pDepthFrame = nullptr;
			hResult = pDepthReader->AcquireLatestFrame( &pDepthFrame );
			while(!SUCCEEDED(hResult)){
				Sleep(10);
				hResult = pDepthReader->AcquireLatestFrame( &pDepthFrame );
			}

			if( SUCCEEDED( hResult ) ){
				unsigned int bufferSize = 0;
				unsigned short* buffer = nullptr;
				hResult = pDepthFrame->AccessUnderlyingBuffer( &bufferSize, &buffer );

				if( SUCCEEDED( hResult ) ){
					for( int y = 0; y < h; y++ ){
						for( int x = 0; x < w; x++ ){
							Vec3b intensity = frame.at<Vec3b>(y, x);
							if(buffer[ y * w + (w - x - 1) ]  < hauteurCamera){
								int d = buffer[ y * w + (w - x - 1) ];
								intensity.val[0] = 2.55*(d % 100);
								intensity.val[1] = 1.22*(d % 200);
								intensity.val[2] = 256.0*d/hauteurCamera;
							}
							else
							{
								intensity.val[0] = 255;
								intensity.val[1] = 255;
								intensity.val[2] = 255;
							}
							/*intensity.val[0] = buffer[ y * w + x ] >> 8;
							intensity.val[1] = buffer[ y * w + x ] >> 8;
							intensity.val[2] = buffer[ y * w + x ] >> 8;*/
							frame.at<Vec3b>(y, x) = intensity;
						}
					}

					// change the rectangle color depending on whether the corner heights are similar or not (less than 4 cm apart)

					float d1 = buffer[(int)(fountainYPosition-fountainWidth/2.0)*w + (w-1-(int)(fountainXPosition-fountainWidth/2.0))];
					float d2 = buffer[(int)(fountainYPosition-fountainWidth/2.0)*w + (w-1-(int)(fountainXPosition+fountainWidth/2.0))];
					float d3 = buffer[(int)(fountainYPosition+fountainWidth/2.0)*w + (w-1-(int)(fountainXPosition-fountainWidth/2.0))];
					float d4 = buffer[(int)(fountainYPosition+fountainWidth/2.0)*w + (w-1-(int)(fountainXPosition+fountainWidth/2.0))];

					if((d1 < 0)||(d1>3500)||(d2 < 0)||(d2>3500)||(d3 < 0)||(d3>3500)||(d4 < 0)||(d4>3500)){
						similarity = 0;
					}
					else
					{
						int mn = 100;
						if((abs(d1-d2) < mn)
							&&(abs(d1-d3) < mn)
							&&(abs(d1-d4) < mn)
							&&(abs(d2-d3) < mn)
							&&(abs(d2-d4) < mn)
							&&(abs(d3-d4) < mn))
							similarity = 255;
						else{
							int md = abs(d1-d2);
							md = MAX(md,abs(d1-d3));
							md = MAX(md,abs(d1-d4));
							md = MAX(md,abs(d2-d3));
							md = MAX(md,abs(d2-d4));
							md = MAX(md,abs(d3-d4));
							if(md-mn>128)
								similarity = 0;
							else
								similarity = 128 - (md - mn);
						}
					}


					if(k=='s'){

						// get the camera height

						// from the depth frame, get the 3D positions of the 1.20 m jets and save them to another file to be loaded by an Observer


						CameraSpacePoint cameraPoint = { 0 };
						DepthSpacePoint depthPoint = { 0 };
						UINT16 depth;

						// Compute the camera height as the average of the four corner depths
						float h = 0;
						int cptH = 0;
						float d;

						d = buffer[(int)(fountainYPosition-fountainWidth/2.0)*w + (w-1-(int)(fountainXPosition-fountainWidth/2.0))];
						if((d>500)&&(d<3500)){h+=d; cptH++;}
						d = buffer[(int)(fountainYPosition-fountainWidth/2.0)*w + (w-1-(int)(fountainXPosition+fountainWidth/2.0))];
						if((d>500)&&(d<3500)){h+=d; cptH++;}
						d = buffer[(int)(fountainYPosition+fountainWidth/2.0)*w + (w-1-(int)(fountainXPosition-fountainWidth/2.0))];
						if((d>500)&&(d<3500)){h+=d; cptH++;}
						d = buffer[(int)(fountainYPosition+fountainWidth/2.0)*w + (w-1-(int)(fountainXPosition+fountainWidth/2.0))];
						if((d>500)&&(d<3500)){h+=d; cptH++;}
						if(cptH>0){
							//hauteurCamera = h/cptH;
							cout << "H = " << hauteurCamera << endl;
						}

						// Compute real size of blaster
						/*depthPoint.X = static_cast<float>(blasterXPosition[0] - blasterWidth); 
						depthPoint.Y = static_cast<float>(blasterYPosition[0] - blasterWidth); 
						depth = hauteurCamera - 1000.0f; // TODO change
						pCoordinateMapper->MapDepthPointToCameraSpace(depthPoint,depth,&cameraPoint);
						float corner1X = static_cast<float>(cameraPoint.X);
						float corner1Y = static_cast<float>(cameraPoint.Y);


						depthPoint.X = static_cast<float>(blasterXPosition[0] + blasterWidth); 
						depthPoint.Y = static_cast<float>(blasterYPosition[0] + blasterWidth); 
						pCoordinateMapper->MapDepthPointToCameraSpace(depthPoint,depth,&cameraPoint);
						float corner2X = static_cast<float>(cameraPoint.X);
						float corner2Y = static_cast<float>(cameraPoint.Y);*/

						//float realBlasterWidth = 1000.0*(abs(corner2X-corner1X)+abs(corner2Y-corner1Y))/2.0;


						ofstream myfile;
						myfile.open ("source_config.txt");
						myfile << hauteurCamera << "\n";
						myfile << fountainXPosition << "\n";
						myfile << fountainYPosition << "\n";
						myfile << fountainWidth << "\n";
						myfile << blasterWidth << "\n";
						myfile << blasterXPosition.size() << "\n";
						for(int i = 0;i<blasterXPosition.size();i++){
							myfile << blasterXPosition[i] << "\n";
							myfile << blasterYPosition[i] << "\n";
						}
						myfile.close();

						//  save real positions to file

						myfile.open ("source3D.txt");
						myfile << hauteurCamera << "\n";
						myfile << blasterWidth << "\n";
						myfile << blasterXPosition.size() << "\n";
						for(int i = 0;i<blasterXPosition.size();i++){
							depthPoint.X = static_cast<float>(blasterXPosition[i]); 
							depthPoint.Y = static_cast<float>(blasterYPosition[i]); 
							depth = hauteurCamera;
							pCoordinateMapper->MapDepthPointToCameraSpace(depthPoint,depth,&cameraPoint);
							cout << depthPoint.X << " - " << depthPoint.Y << endl;
							cout << static_cast<float>(cameraPoint.X) << " - " << static_cast<float>(cameraPoint.Y) << endl;
							pCoordinateMapper->MapCameraPointToDepthSpace(cameraPoint, &depthPoint);
							cout << depthPoint.X << " - " << depthPoint.Y << endl;
							cout << static_cast<float>(cameraPoint.X) << " - " << static_cast<float>(cameraPoint.Y) << endl;
							myfile << 1000.0*static_cast<float>(cameraPoint.X) << "\n";
							myfile << 1000.0*static_cast<float>(cameraPoint.Y) << "\n";
						}
						myfile.close();


					}


				}
				else{
					return false;
				}

			}
			else 
				return false;

			if( pDepthFrame != NULL ){
				pDepthFrame->Release();
				pDepthFrame = NULL;
			}

			rectangle(frame,
				Rect(fountainXPosition-fountainWidth/2.0, 
				fountainYPosition-fountainWidth/2.0, 
				fountainWidth	, 
				fountainWidth),
				Scalar(0,similarity,255-similarity),
				3);
			for(int i = -2; i <= 2; i++)
			{
				rectangle(frame,
					Rect(fountainXPosition - (i*blasterWidth) - (blasterWidth/2.0), 
					fountainYPosition-fountainWidth/2.0, 
					blasterWidth, 
					fountainWidth),
					Scalar(0,255,0),
					1);
				rectangle(frame,
					Rect(fountainXPosition-fountainWidth/2.0,
					fountainYPosition - (i*blasterWidth) - (blasterWidth/2.0),
					fountainWidth, 
					blasterWidth),
					Scalar(0,255,0),
					1);
			}

			char textbuffer [33];
			for(int i = 0;i<blasterXPosition.size();i++){
				sprintf(textbuffer,"%i",i+1);
				putText(frame,textbuffer, Point2f(blasterXPosition[i]-blasterWidth/2.0,blasterYPosition[i]), FONT_HERSHEY_PLAIN, 1,  Scalar(0,0,255,255), 2);
				//rectangle(frame,Rect(blasterXPosition[i]-blasterWidth/2.0,blasterYPosition[i]-blasterWidth/2.0,blasterWidth,blasterWidth),Scalar(255,0,0));
				circle(frame,Point(blasterXPosition[i],blasterYPosition[i]),blasterWidth/2.0,Scalar(255,0,0));
			}
			resize(frame,display,Size(displaySize*w,displaySize*h));
		}

		cv::imshow("source_config", display);


		k=cv::waitKey(1);	

		if(k == 32) // Space
			displayColor = ! displayColor;
		
		if(k=='a')
			alignBuseFromLayout();
		if(k=='r')
			resetFountainPosition();

	}


	if (pDepthReader)
	{
		pDepthReader->Release();
		pDepthReader = NULL;
	}


	if (pCoordinateMapper)
	{
		pCoordinateMapper->Release();
		pCoordinateMapper = NULL;
	}

	m_pKinectSensor->Close();
	if (m_pKinectSensor)
	{
		m_pKinectSensor->Release();
		m_pKinectSensor = NULL;
	}


	//system("pause");

	return 0;
}
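Example #4 reads its parameters from source_config.txt, one integer per line, in exactly the order the getline calls above consume them: camera height (in mm), fountain X, fountain Y, fountain width, blaster width, number of blasters, then an X/Y pair (in depth-image pixels) for each blaster; the same format is written back when 's' is pressed. A hypothetical file for two blasters, with made-up values purely for illustration, would look like:

2500
256
212
120
20
2
240
200
272
200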
Example #5
int main(int argc, char** argv)
{
   int first_time = 0;
   Size screen_size(1440, 900);//the dst image size,e.g.100x100
   Scalar text_color = Scalar(0, 255, 0);
   Scalar text_color2 = Scalar(0, 255, 255);
   Scalar text_color3 = Scalar(0, 0, 255);

   inhaler_coach coach;
   coach.control = 0;
   thread mThread(test_func, &coach);

   // 1a. Get Kinect Sensor
   cout << "Try to get default sensor" << endl;
   IKinectSensor* pSensor = nullptr;
   if (GetDefaultKinectSensor(&pSensor) != S_OK)
   {
      cerr << "Get Sensor failed" << endl;
      return -1;
   }

   // 1b. Open sensor
   cout << "Try to open sensor" << endl;
   if (pSensor->Open() != S_OK)
   {
      cerr << "Can't open sensor" << endl;
      return -1;
   }

   // 2. Color Related code
   IColorFrameReader* pColorFrameReader = nullptr;
   cv::Mat	mColorImg;
   UINT uBufferSize = 0;
   UINT uColorPointNum = 0;
   int iWidth = 0;
   int iHeight = 0;
   {
      // 2a. Get color frame source
      cout << "Try to get color source" << endl;
      IColorFrameSource* pFrameSource = nullptr;
      if (pSensor->get_ColorFrameSource(&pFrameSource) != S_OK)
      {
         cerr << "Can't get color frame source" << endl;
         return -1;
      }

      // 2b. Get frame description
      cout << "get color frame description" << endl;
      IFrameDescription* pFrameDescription = nullptr;
      if (pFrameSource->get_FrameDescription(&pFrameDescription) == S_OK)
      {
         pFrameDescription->get_Width(&iWidth);
         pFrameDescription->get_Height(&iHeight);
      }
      pFrameDescription->Release();
      pFrameDescription = nullptr;

      // 2c. get frame reader
      cout << "Try to get color frame reader" << endl;
      if (pFrameSource->OpenReader(&pColorFrameReader) != S_OK)
      {
         cerr << "Can't get color frame reader" << endl;
         return -1;
      }

      // 2d. release Frame source
      cout << "Release frame source" << endl;
      pFrameSource->Release();
      pFrameSource = nullptr;

      // Prepare OpenCV data
      mColorImg = cv::Mat(iHeight, iWidth, CV_8UC4);
      uBufferSize = iHeight * iWidth * 4 * sizeof(BYTE);
      uColorPointNum = iHeight * iWidth;
   }
   

   // 3. Depth related code
   IDepthFrameReader* pDepthFrameReader = nullptr;
   UINT uDepthPointNum = 0;
   int iDepthWidth = 0, iDepthHeight = 0;
   cout << "Try to get depth source" << endl;
   {
      // Get frame source
      IDepthFrameSource* pFrameSource = nullptr;
      if (pSensor->get_DepthFrameSource(&pFrameSource) != S_OK)
      {
         cerr << "Can't get depth frame source" << endl;
         return -1;
      }

      // Get frame description
      cout << "get depth frame description" << endl;
      IFrameDescription* pFrameDescription = nullptr;
      if (pFrameSource->get_FrameDescription(&pFrameDescription) == S_OK)
      {
         pFrameDescription->get_Width(&iDepthWidth);
         pFrameDescription->get_Height(&iDepthHeight);
         uDepthPointNum = iDepthWidth * iDepthHeight;
      }
      pFrameDescription->Release();
      pFrameDescription = nullptr;

      // get frame reader
      cout << "Try to get depth frame reader" << endl;
      if (pFrameSource->OpenReader(&pDepthFrameReader) != S_OK)
      {
         cerr << "Can't get depth frame reader" << endl;
         return -1;
      }

      // release Frame source
      cout << "Release frame source" << endl;
      pFrameSource->Release();
      pFrameSource = nullptr;
   }



   // 4. Body related code
   IBodyFrameReader* pBodyFrameReader = nullptr;
   IBody** aBodyData = nullptr;
   INT32 iBodyCount = 0;
   {
      // 3a. Get frame source
      cout << "Try to get body source" << endl;
      IBodyFrameSource* pFrameSource = nullptr;
      if (pSensor->get_BodyFrameSource(&pFrameSource) != S_OK)
      {
         cerr << "Can't get body frame source" << endl;
         return -1;
      }

      // 3b. Get the number of bodies
      if (pFrameSource->get_BodyCount(&iBodyCount) != S_OK)
      {
         cerr << "Can't get body count" << endl;
         return -1;
      }
      cout << " > Can trace " << iBodyCount << " bodies" << endl;
      aBodyData = new IBody*[iBodyCount];
      for (int i = 0; i < iBodyCount; ++i)
         aBodyData[i] = nullptr;

      // 3c. get frame reader
      cout << "Try to get body frame reader" << endl;
      if (pFrameSource->OpenReader(&pBodyFrameReader) != S_OK)
      {
         cerr << "Can't get body frame reader" << endl;
         return -1;
      }

      // 3d. release Frame source
      cout << "Release frame source" << endl;
      pFrameSource->Release();
      pFrameSource = nullptr;
   }
   


   // 4. Body Index related code
   IBodyIndexFrameReader* pBIFrameReader = nullptr;
   cout << "Try to get body index source" << endl;
   {
      // Get frame source
      IBodyIndexFrameSource* pFrameSource = nullptr;
      if (pSensor->get_BodyIndexFrameSource(&pFrameSource) != S_OK)
      {
         cerr << "Can't get body index frame source" << endl;
         return -1;
      }

      // get frame reader
      cout << "Try to get body index frame reader" << endl;
      if (pFrameSource->OpenReader(&pBIFrameReader) != S_OK)
      {
         cerr << "Can't get depth frame reader" << endl;
         return -1;
      }

      // release Frame source
      cout << "Release frame source" << endl;
      pFrameSource->Release();
      pFrameSource = nullptr;
   }



   // 5. background
   cv::Mat imgBG(iHeight, iWidth, CV_8UC3);
   imgBG.setTo(0);







   // 4. get CoordinateMapper
   ICoordinateMapper* pCoordinateMapper = nullptr;
   if (pSensor->get_CoordinateMapper(&pCoordinateMapper) != S_OK)
   {
      cout << "Can't get coordinate mapper" << endl;
      return -1;
   }

   // Enter main loop
   UINT16* pDepthPoints = new UINT16[uDepthPointNum];
   BYTE*	pBodyIndex = new BYTE[uDepthPointNum];
   DepthSpacePoint* pPointArray = new DepthSpacePoint[uColorPointNum];

   cv::namedWindow("Inhaler Coach");
   while (true)
   {
      // 4a. Get last frame
      IColorFrame* pColorFrame = nullptr;
      if (pColorFrameReader->AcquireLatestFrame(&pColorFrame) == S_OK)
      {
         pColorFrame->CopyConvertedFrameDataToArray(uBufferSize, mColorImg.data, ColorImageFormat_Bgra);
         pColorFrame->Release();
         pColorFrame = nullptr;
      }
      cv::Mat mImg = mColorImg.clone();
     
      // 8b. read depth frame
      IDepthFrame* pDepthFrame = nullptr;
      if (pDepthFrameReader->AcquireLatestFrame(&pDepthFrame) == S_OK)
      {
         pDepthFrame->CopyFrameDataToArray(uDepthPointNum, pDepthPoints);
         pDepthFrame->Release();
         pDepthFrame = nullptr;
      }


      // 8c. read body index frame
      IBodyIndexFrame* pBIFrame = nullptr;
      if (pBIFrameReader->AcquireLatestFrame(&pBIFrame) == S_OK)
      {
         pBIFrame->CopyFrameDataToArray(uDepthPointNum, pBodyIndex);
         pBIFrame->Release();
         pBIFrame = nullptr;
      }

#ifdef COACH_DEBUG
      cv::Mat imgTarget = imgBG.clone();
      // 9b. map color to depth
      if (pCoordinateMapper->MapColorFrameToDepthSpace(uDepthPointNum, pDepthPoints, uColorPointNum, pPointArray) == S_OK)
      {
         for (int y = 0; y < imgTarget.rows; ++y)
         {
            for (int x = 0; x < imgTarget.cols; ++x)
            {
               // ( x, y ) in color frame = rPoint in depth frame
               const DepthSpacePoint& rPoint = pPointArray[y * imgTarget.cols + x];

               // check if rPoint is in range
               if (rPoint.X >= 0 && rPoint.X < iDepthWidth && rPoint.Y >= 0 && rPoint.Y < iDepthHeight)
               {
                  // fill color from color frame if this pixel is user
                  int iIdx = (int)rPoint.X + iDepthWidth * (int)rPoint.Y;
                  if (pBodyIndex[iIdx] < 6)
                  {
                     cv::Vec4b& rPixel = mImg.at<cv::Vec4b>(y, x);
                     imgTarget.at<cv::Vec3b>(y, x) = cv::Vec3b(rPixel[0], rPixel[1], rPixel[2]);
                  }
               }
            }
         }
      }
#else
   cv::Mat imgTarget = mImg.clone();
#endif




      // 4b. Get body data
      IBodyFrame* pBodyFrame = nullptr;
      if (pBodyFrameReader->AcquireLatestFrame(&pBodyFrame) == S_OK)
      {

         // 4b. get Body data
         if (pBodyFrame->GetAndRefreshBodyData(iBodyCount, aBodyData) == S_OK)
         {
            // 4c. for each body
            for (int i = 0; i < iBodyCount; ++i)
            {
               IBody* pBody = aBodyData[i];

               // check if is tracked
               BOOLEAN bTracked = false;
               if ((pBody->get_IsTracked(&bTracked) == S_OK) && bTracked)
               {
                  // get joint position
                  Joint aJoints[JointType::JointType_Count];
                  if (pBody->GetJoints(JointType::JointType_Count, aJoints) == S_OK)
                  {
                     
                     if (coach.state == 0){
                        coach.state = 1;
                        if (first_time == 0){
                           first_time = 1;
                           PlaySound(TEXT("welcome.wav"), NULL, SND_FILENAME);
                        }
                     }
#ifdef COACH_DEBUG               
                     DrawLine(imgTarget, aJoints[JointType_SpineBase], aJoints[JointType_SpineMid], pCoordinateMapper);
                     DrawLine(imgTarget, aJoints[JointType_SpineMid], aJoints[JointType_SpineShoulder], pCoordinateMapper);
                     DrawLine(imgTarget, aJoints[JointType_SpineShoulder], aJoints[JointType_Neck], pCoordinateMapper);
                     DrawLine(imgTarget, aJoints[JointType_Neck], aJoints[JointType_Head], pCoordinateMapper);

                     DrawLine(imgTarget, aJoints[JointType_SpineShoulder], aJoints[JointType_ShoulderLeft], pCoordinateMapper);
                     DrawLine(imgTarget, aJoints[JointType_ShoulderLeft], aJoints[JointType_ElbowLeft], pCoordinateMapper);
                     DrawLine(imgTarget, aJoints[JointType_ElbowLeft], aJoints[JointType_WristLeft], pCoordinateMapper);
                     DrawLine(imgTarget, aJoints[JointType_WristLeft], aJoints[JointType_HandLeft], pCoordinateMapper);
                     DrawLine(imgTarget, aJoints[JointType_HandLeft], aJoints[JointType_HandTipLeft], pCoordinateMapper);
                     //DrawLine(imgTarget, aJoints[JointType_HandLeft], aJoints[JointType_ThumbLeft], pCoordinateMapper);

                     DrawLine(imgTarget, aJoints[JointType_SpineShoulder], aJoints[JointType_ShoulderRight], pCoordinateMapper);
                     DrawLine(imgTarget, aJoints[JointType_ShoulderRight], aJoints[JointType_ElbowRight], pCoordinateMapper);
                     DrawLine(imgTarget, aJoints[JointType_ElbowRight], aJoints[JointType_WristRight], pCoordinateMapper);
                     DrawLine(imgTarget, aJoints[JointType_WristRight], aJoints[JointType_HandRight], pCoordinateMapper);
                     DrawLine(imgTarget, aJoints[JointType_HandRight], aJoints[JointType_HandTipRight], pCoordinateMapper);
                     //DrawLine(imgTarget, aJoints[JointType_HandRight], aJoints[JointType_ThumbRight], pCoordinateMapper);
#endif
                     ColorSpacePoint q;
                     ColorSpacePoint head;
                     //ColorSpacePoint w;

                     pCoordinateMapper->MapCameraPointToColorSpace(aJoints[JointType_Head].Position, &head);
                     // check shaking
                     coach.shaking_detection(aJoints, pCoordinateMapper);
                     q = coach.position_checking(aJoints, pCoordinateMapper);
#ifdef COACH_DEBUG
                     circle(imgTarget, cv::Point(q.X, q.Y), 10, Scalar(0, 255, 255), 10, 8, 0);
                     //circle(imgTarget, cv::Point(q.X, q.Y), 10, Scalar(0, 255, 255), 10, 8, 0);
                     rectangle(imgTarget, Point(head.X - 50, head.Y - 40), Point(head.X + 50, head.Y + 90), Scalar(0, 255, 255), 1, 8, 0);
                     //circle(imgTarget, cv::Point(w.X, w.Y), 10, Scalar(255, 0, 255), 10, 8, 0);
#endif
                     coach.state_change_rule();
                  }
               }               
            }
         }
         else
         {
            cerr << "Can't read body data" << endl;
         }

         // 4e. release frame
         pBodyFrame->Release();
      }


      switch (coach.state){
         case 0: putText(imgTarget, "CMU Inhaler Coaching System", Point(120, 120), FONT_HERSHEY_DUPLEX, 2, text_color); break;
         case 1: putText(imgTarget, "Please shake the inhaler", Point(20, 120), FONT_HERSHEY_DUPLEX, 2, text_color2); break;
         case 2: putText(imgTarget, "Shaking detected", Point(20, 120), FONT_HERSHEY_DUPLEX, 2, text_color2); break;
         case 3: putText(imgTarget, "Please put the inhaler in front of your mouth", Point(20, 120), FONT_HERSHEY_DUPLEX, 2, text_color2); break;
         case 4: putText(imgTarget, "Position check OK", Point(20, 120), FONT_HERSHEY_DUPLEX, 2, text_color2); break;
         case 5: putText(imgTarget, "You forget to shake the inhaler first!!!", Point(20, 120), FONT_HERSHEY_DUPLEX, 2, text_color3); break;
      }

      // show image
      Mat dst;
      resize(imgTarget, dst, screen_size);
      imshow("Coach", dst);
      // 4c. check keyboard input
      if (cv::waitKey(30) == VK_ESCAPE){
         break;
      }
   }
   mThread.join();
   
   // 3. delete body data array
   delete[] aBodyData;

   // 3. release frame reader
   cout << "Release body frame reader" << endl;
   pBodyFrameReader->Release();
   pBodyFrameReader = nullptr;

   // 2. release color frame reader
   cout << "Release color frame reader" << endl;
   pColorFrameReader->Release();
   pColorFrameReader = nullptr;

   // 1c. Close Sensor
   cout << "close sensor" << endl;
   pSensor->Close();

   // 1d. Release Sensor
   cout << "Release sensor" << endl;
   pSensor->Release();
   pSensor = nullptr;

   return 0;
}
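Example #5 calls a DrawLine helper that is not included in the snippet. A minimal sketch, assuming it simply maps the two joints from camera space to color space and connects them when both are tracked, could be:

// Hypothetical sketch of the DrawLine helper used above: map both joints to
// color-space coordinates and draw a line between them if both are tracked.
void DrawLine(cv::Mat& img, const Joint& j1, const Joint& j2,
              ICoordinateMapper* pMapper)
{
   if (j1.TrackingState == TrackingState_NotTracked ||
       j2.TrackingState == TrackingState_NotTracked)
      return;

   ColorSpacePoint p1 = { 0 }, p2 = { 0 };
   pMapper->MapCameraPointToColorSpace(j1.Position, &p1);
   pMapper->MapCameraPointToColorSpace(j2.Position, &p2);

   cv::line(img,
            cv::Point(static_cast<int>(p1.X), static_cast<int>(p1.Y)),
            cv::Point(static_cast<int>(p2.X), static_cast<int>(p2.Y)),
            cv::Scalar(0, 0, 255), 3);
}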