void RealsensePropGetListener::getValue(imaqkit::IPropInfo* propertyInfo, void* value)
{
	const char* propname = propertyInfo->getPropertyName();

	PXCSenseManager *psm = PXCSenseManager::CreateInstance();
	psm->EnableStream(PXCCapture::STREAM_TYPE_COLOR, 640, 480);
	psm->Init();
	if (strcmp(propname, "color_brightness") == 0)
	{
		// The device handle is only valid between Init() and Release().
		PXCCapture::Device *device = psm->QueryCaptureManager()->QueryDevice();
		pxcI32 brightness = device->QueryColorBrightness();
		*(reinterpret_cast<int*>(value)) = brightness;
	}
	else
	{
		assert(false && "Unhandled property. Need to add this new property.");
	}
	psm->Release();
}
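
// A minimal companion sketch, not part of the original adaptor: writing the same
// property back to the camera. The helper name setColorBrightnessSketch is
// hypothetical; PXCSenseManager and PXCCapture::Device come from the RealSense SDK,
// and SetColorBrightness() is assumed to be the setter paired with the
// QueryColorBrightness() call used above.
static void setColorBrightnessSketch(int newValue)
{
	PXCSenseManager *psm = PXCSenseManager::CreateInstance();
	if (!psm) return;
	psm->EnableStream(PXCCapture::STREAM_TYPE_COLOR, 640, 480);
	if (psm->Init() >= PXC_STATUS_NO_ERROR)
	{
		PXCCapture::Device *device = psm->QueryCaptureManager()->QueryDevice();
		if (device) device->SetColorBrightness(static_cast<pxcI32>(newValue));
	}
	psm->Release();
}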
DWORD WINAPI RealSenseAdaptor::acquireThread(void* param) {
	RealSenseAdaptor* adaptor = reinterpret_cast<RealSenseAdaptor*>(param);
	// Set the thread priority.
	SetThreadPriority(GetCurrentThread(), THREAD_PRIORITY_TIME_CRITICAL);
	MSG msg;
	while (GetMessage(&msg, NULL, 0, 0) > 0) {
		switch (msg.message) {
		case WM_USER:
		{
			// Check if a frame needs to be acquired
			std::auto_ptr<imaqkit::IAutoCriticalSection> driverSection(imaqkit::createAutoCriticalSection(adaptor->_driverGuard, false));
			// Get frame type & dimensions.

			imaqkit::frametypes::FRAMETYPE frameType =
				adaptor->getFrameType();
			int imWidth = adaptor->getMaxWidth();
			int imHeight = adaptor->getMaxHeight();
			int imBand = adaptor->getNumberOfBands();
			int camera_id = adaptor->getDeviceID();

			PXCSession *session = PXCSession_Create();
			if (!session) return 0;
			PXCSenseManager *psm = session->CreateSenseManager();

			if (imBand == 3)
			{
				if (camera_id == 1)
				{
					PXCCapture::DeviceInfo dinfo = {};
					pxcCHAR *myname = L"Intel(R) RealSense(TM) 3D Camera";
					psm->QueryCaptureManager()->FilterByDeviceInfo(myname, dinfo.did, dinfo.didx);
					psm->EnableStream(PXCCapture::STREAM_TYPE_COLOR, imWidth, imHeight);

				}
				else
				{
					PXCCapture::DeviceInfo dinfo = {};
					pxcCHAR *myname = L"Intel(R) RealSense(TM) 3D Camera R200";
					psm->QueryCaptureManager()->FilterByDeviceInfo(myname, dinfo.did, dinfo.didx);
					psm->EnableStream(PXCCapture::STREAM_TYPE_COLOR, imWidth, imHeight, 30);
				}

			}
			else
			{
				if (camera_id == 2)
				{
					PXCCapture::DeviceInfo dinfo = {};
					pxcCHAR *myname = L"Intel(R) RealSense(TM) 3D Camera";
					psm->QueryCaptureManager()->FilterByDeviceInfo(myname, dinfo.did, dinfo.didx);
					psm->EnableStream(PXCCapture::STREAM_TYPE_DEPTH, imWidth, imHeight);


				}
				else
				{
					PXCCapture::DeviceInfo dinfo = {};
					pxcCHAR *myname = L"Intel(R) RealSense(TM) 3D Camera R200";
					psm->QueryCaptureManager()->FilterByDeviceInfo(myname, dinfo.did, dinfo.didx);
					psm->EnableStream(PXCCapture::STREAM_TYPE_DEPTH, imWidth, imHeight, 30);
					psm->EnableStream(PXCCapture::STREAM_TYPE_COLOR, 640, 480, 30);
				}

			}

			UtilRender *renderColor = new UtilRender(L"COLOR STREAM");
			UtilRender *renderDepth = new UtilRender(L"DEPTH STREAM");
			psm->Init();
			PXCImage *colorIm, *depthIm;

			while (adaptor->isAcquisitionNotComplete() && adaptor->isAcquisitionActive()) {
				driverSection->enter();
				if (psm->AcquireFrame(true) < PXC_STATUS_NO_ERROR) { driverSection->leave(); break; }
				// retrieve all available image samples
				PXCCapture::Sample *sample = psm->QuerySample();
				if (!sample) { psm->ReleaseFrame(); driverSection->leave(); break; }
				// Color frames are packed 8-bit RGB; depth frames are 16-bit, so the
				// depth buffer must hold unsigned shorts or the values get truncated.
				unsigned char *imBuffer1 = new unsigned char[imWidth * imHeight * imBand];
				unsigned short *imBuffer2 = new unsigned short[imWidth * imHeight];
				if (imBand == 3)
				{
					colorIm = sample->color;
					if (!colorIm) { delete[] imBuffer1; delete[] imBuffer2; psm->ReleaseFrame(); driverSection->leave(); break; }
					PXCImage::ImageData cdata;
					pxcStatus sts = colorIm->AcquireAccess(PXCImage::ACCESS_READ, PXCImage::PIXEL_FORMAT_RGB32, &cdata);
					// Walk rows by the reported pitch (which may exceed imWidth * 4)
					// and swap the BGRA source channels into packed RGB.
					for (int ix = 0; ix < imHeight; ix++)
					{
						for (int jx = 0; jx < imWidth; jx++)
						{
							for (int c = 0; c < imBand; c++)
							{
								imBuffer1[(ix * imWidth + jx) * imBand + imBand - 1 - c] = *(pxcBYTE*)(cdata.planes[0] + ix * cdata.pitches[0] + 4 * jx + c);
							}
						}
					}
					colorIm->ReleaseAccess(&cdata);
					renderColor->RenderFrame(colorIm);

				}
				else
				{
					depthIm = sample->depth;
					if (!depthIm) { delete[] imBuffer1; delete[] imBuffer2; psm->ReleaseFrame(); driverSection->leave(); break; }

					PXCImage::ImageData ddata;
					pxcStatus sts = depthIm->AcquireAccess(PXCImage::ACCESS_READ, PXCImage::PIXEL_FORMAT_DEPTH, &ddata);
					// Copy the 16-bit depth values row by row, honoring the plane pitch.
					for (int id = 0; id < imHeight; id++)
					{
						for (int jd = 0; jd < imWidth; jd++)
						{
							imBuffer2[id * imWidth + jd] = *(pxcU16*)(ddata.planes[0] + id * ddata.pitches[0] + jd * 2);
						}
					}
					depthIm->ReleaseAccess(&ddata);
					renderDepth->RenderFrame(depthIm);
			
				}

				if (adaptor->isSendFrame()) {
					// Create a frame object.
					imaqkit::IAdaptorFrame* frame =
						adaptor->getEngine()->makeFrame(frameType,
						imWidth,
						imHeight);
					// Copy data from buffer into frame object.
					if (imBand == 3)
					{
						frame->setImage(imBuffer1,
							imWidth,
							imHeight,
							0, // X Offset from origin
							0); // Y Offset from origin
					}
					else
					{
						frame->setImage(imBuffer2,
							imWidth,
							imHeight,
							0, // X Offset from origin
							0);
					}

					// Set image's timestamp.
					frame->setTime(imaqkit::getCurrentTime());
					// Send frame object to engine.
					adaptor->getEngine()->receiveFrame(frame);
				} // if isSendFrame()		
				psm->ReleaseFrame();
				// Increment the frame count.
				adaptor->incrementFrameCount();
				// Cleanup. Deallocate imBuffer.
				delete[] imBuffer1;
				delete[] imBuffer2;
				driverSection->leave();
			} // while(isAcquisitionNotComplete()
			delete renderColor;
			delete renderDepth;
			psm->Release();
		}
		break;
		} //switch-case WM_USER
	}//while message is not WM_QUIT
	return 0;
}
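
// A hedged sketch, not taken from the adaptor code above, of how a message-pump
// worker like acquireThread() is typically started and stopped: WM_USER wakes the
// acquisition loop and WM_QUIT makes GetMessage() return 0. The helper names are
// hypothetical, and acquireThread is assumed to be a static member usable as a
// thread entry point.
static HANDLE startAcquireThreadSketch(RealSenseAdaptor* adaptor, DWORD* threadId)
{
	HANDLE thread = CreateThread(NULL, 0, &RealSenseAdaptor::acquireThread, adaptor, 0, threadId);
	// The new thread's message queue may not exist immediately, so retry briefly.
	for (int i = 0; thread && i < 100 && !PostThreadMessage(*threadId, WM_USER, 0, 0); i++)
		Sleep(10);
	return thread;
}

static void stopAcquireThreadSketch(HANDLE thread, DWORD threadId)
{
	PostThreadMessage(threadId, WM_QUIT, 0, 0); // ends the GetMessage() loop
	WaitForSingleObject(thread, INFINITE);
	CloseHandle(thread);
}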
/*******************************************************************************
Main entry point. Here we demonstrate, after some initial housekeeping, how to
initialize the camera, start streaming, grab samples, and process them.
*******************************************************************************/
int main(int argc, char *argv[]) {
	if (!ProcessCmdArgs(argc, argv)) {
		return 1;
	}

	// Check / create file directories
	if (xdmPath != nullptr) {
		if (_mkdir(xdmPath) != 0 && errno != EEXIST){
			fprintf(stderr, "Error: Invalid XDM path. Error %d\n", errno);
			fprintf(stderr, "Terminate? [Y/n]\n");
			char choice = _getch();
			if (choice != 'n' && choice != 'N') {
				return ERROR_FILE;
			}
			xdmPath = nullptr;
		}
		else {
			// Remove any trailing '\' in the path since we'll add it later.
			if (xdmPath[strlen(xdmPath) - 1] == '\\')
				xdmPath[strlen(xdmPath) - 1] = '\0';
		}
	}
	if (bufPath != nullptr) {
		if (_mkdir(bufPath) != 0 && errno != EEXIST){
			fprintf(stderr, "Error: Invalid Buffer path. Error %d\n", errno);
			fprintf(stderr, "Terminate? [Y/n]\n");
			char choice = _getch();
			if (choice != 'n' && choice != 'N') {
				return ERROR_FILE;
			}
			bufPath = nullptr;
		}
		else {
			// Remove any trailing '\' in the path since we'll add it later.
			if (bufPath[strlen(bufPath) - 1] == '\\')
				bufPath[strlen(bufPath) - 1] = '\0';
		}
	}

	// Start timer for total test execution
	unsigned long long programStart = GetTickCount64();
	
	// Initialize camera and streams
	if (verbose) fprintf_s(stdout, "Initializing camera...\n");
	// The Sense Manager is the root object for interacting with the camera.
	PXCSenseManager *senseManager = nullptr;
	senseManager = PXCSenseManager::CreateInstance();
	if (!senseManager) {
		fprintf_s(stderr, "Unable to create the PXCSenseManager\n");
		return ERROR_CAMERA;
	}

	// When enabling the streams (color and depth), the parameters must match
	// the capabilities of the camera. For example, 60fps for color will fail
	// on the DS4 / R200.
	// Here we're hard-coding the resolution and frame rate
	senseManager->EnableStream(PXCCapture::STREAM_TYPE_COLOR, 320, 240, 30);
	senseManager->EnableStream(PXCCapture::STREAM_TYPE_DEPTH, 320, 240, 30);

	// Initialize the PXCSenseManager
	pxcStatus camStatus;
	camStatus = senseManager->Init();
	if (camStatus != PXC_STATUS_NO_ERROR) {
		fprintf_s(stderr, "Unable to initizlize PXCSenseManager\n");
		senseManager->Release();
		return ERROR_CAMERA;
	}
	
	PXCImage *colorImage;
	PXCImage *depthImage;
	PXCPhoto *xdmPhoto = senseManager->QuerySession()->CreatePhoto();

	// These two objects come from the Intel RealSense SDK helper for rendering
	// camera data. Any rendering technique could be used or omitted if no 
	// visual feedback is desired.
	UtilRender *renderColor = new UtilRender(L"COLOR STREAM");
	UtilRender *renderDepth = new UtilRender(L"DEPTH STREAM");
	
	// Start test
	if (verbose) fprintf_s(stdout, "Running...\n");
	// This section may be wrapped in additional code to automate 
	// repetitive tests. Closure provided just for convenience.
	{ // Beginning of single test block
		unsigned long totalFrames = 0;
		unsigned long long streamStart = GetTickCount64();
		for (unsigned int i = 0; i < framesPerTest; i++) {
			// Passing 'true' to AcquireFrame blocks until all streams are 
			// ready (depth and color).  Passing 'false' will result in 
			// frames unaligned in time.
			camStatus = senseManager->AcquireFrame(true);
			if (camStatus < PXC_STATUS_NO_ERROR) {
				fprintf_s(stderr, "Error acquiring frame: %f\n", camStatus);
				break;
			}
			// Retrieve all available image samples
			PXCCapture::Sample *sample = senseManager->QuerySample();
			if (sample == NULL) {
				fprintf_s(stderr, "Sample unavailable\n");
			}
			else {
				totalFrames++;

				colorImage = sample->color;
				depthImage = sample->depth;
				// Render the frames (not necessary)
				if (!renderColor->RenderFrame(colorImage)) break;
				if (!renderDepth->RenderFrame(depthImage)) break;

				// Save data if requested
				wchar_t filePath[MAXPATH];
				char fileName[18];
				TimeStampString(fileName);
				if (xdmPath != nullptr) { // Save XDM image
					swprintf_s(filePath, MAXPATH, L"%S\\%S%S", 
						xdmPath, 
						fileName,
						xdmExt);
					xdmPhoto->ImportFromPreviewSample(sample);
					pxcStatus saveStatus = xdmPhoto->SaveXDM(filePath);
					if (saveStatus != PXC_STATUS_NO_ERROR)
						fprintf_s(stderr, "Error: SaveXDM\n");
				}
				if (bufPath != nullptr) { // Save depth buffer
					swprintf_s(filePath, MAXPATH, L"%S\\%S%S", 
						bufPath, fileName, depBufExt);
					// The WriteDepthBuf function (not shown here) has a lot of detail
					// about accessing the raw depth image data; a hedged sketch of such
					// a writer follows after this function.
					if (!WriteDepthBuf(depthImage, filePath)) {
						fprintf(stderr, "Error: WriteDepthBuf\n");
					}
				}
			}

			// Unlock the current frame to fetch the next one
			senseManager->ReleaseFrame();

		}

		float frameRate = (float)totalFrames /
			((float)(GetTickCount64() - streamStart) / 1000);
		if (verbose) fprintf_s(stdout, "Frame Rate: %.2f\n", frameRate);
	} // End of single test block

	// Wrap up
	if(verbose) fprintf_s(stdout, "Finished in %llu seconds.\n",
		(GetTickCount64() - programStart) / 1000);

	delete renderColor;
	delete renderDepth;
	xdmPhoto->Release();
	if(senseManager != nullptr) senseManager->Release();

	return 0;
}
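
/*******************************************************************************
A hedged sketch of a depth-buffer writer along the lines of the WriteDepthBuf
referenced above (the real function is not shown here and may differ; the
signature below is an assumption). It locks the depth plane read-only and dumps
the raw 16-bit samples row by row, honoring the plane pitch.
*******************************************************************************/
static bool WriteDepthBufSketch(PXCImage *depthImage, const wchar_t *filePath) {
	if (depthImage == nullptr) return false;
	PXCImage::ImageInfo info = depthImage->QueryInfo();
	PXCImage::ImageData data;
	if (depthImage->AcquireAccess(PXCImage::ACCESS_READ,
		PXCImage::PIXEL_FORMAT_DEPTH, &data) < PXC_STATUS_NO_ERROR) {
		return false;
	}
	FILE *file = nullptr;
	if (_wfopen_s(&file, filePath, L"wb") != 0 || file == nullptr) {
		depthImage->ReleaseAccess(&data);
		return false;
	}
	// planes[0] holds the 16-bit depth values; pitches[0] is the row stride in bytes.
	for (pxcI32 y = 0; y < info.height; y++) {
		const pxcU16 *row = reinterpret_cast<const pxcU16*>(data.planes[0] + y * data.pitches[0]);
		fwrite(row, sizeof(pxcU16), info.width, file);
	}
	fclose(file);
	depthImage->ReleaseAccess(&data);
	return true;
}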
int main(int argc, char* argv[]) {

	pxcStatus status = PXC_STATUS_ALLOC_FAILED;

	PXCSenseManager *session =  PXCSenseManager::CreateInstance();
	if(!session) {
		printf("Instance create failed\n");
		return 10;
	}

	status = session->EnableHand(nullptr);

	if(status != PXC_STATUS_NO_ERROR) {
		printf("Hand data unavailable\n");
		return 20;
	}

	status = session->Init();

	if(status != PXC_STATUS_NO_ERROR) {
		printf("init failed\n");
		return 30;
	}

	PXCHandModule* handTracker = session->QueryHand();

	if(!handTracker) {
		printf("no hand tracking support\n");
		return 40;
	}

	PXCHandConfiguration* handConfig = handTracker->CreateActiveConfiguration();
	handConfig->EnableAllGestures();
	handConfig->ApplyChanges();

	PXCHandData* handData = handTracker->CreateOutput();
	bool running = true;

	// Stream requests normally need to be registered before Init() to take effect.
	status = session->EnableStream(PXCCapture::StreamType::STREAM_TYPE_DEPTH,1920,1080,30.0);

	if (status != PXC_STATUS_NO_ERROR) {
		printf("Unknown error when enabling stream.");
		return 50;
	}

	while(running) {
		//printf("Acquire frame. ");
		status = session->AcquireFrame(true);
		//printf("Got frame.\n");
		if(status < PXC_STATUS_NO_ERROR) {
			printf("He's dead Jim.\n");
			return 50;
		}

		handData->Update();
		//printf("Got %i gestures for %i hands.\n", handData->QueryFiredGesturesNumber(),handData->QueryNumberOfHands());
		for(int i=0; i<handData->QueryFiredGesturesNumber(); i++) {
			PXCHandData::GestureData gestureData;
			handData->QueryFiredGestureData(i,gestureData);
			
			
			std::wstring nameStr(gestureData.name);
			std::wcout << nameStr << std::endl;
			
			//printf("%s - len %d\n",nameStr.c_str(), nameStr.size());

			if(nameStr == L"v_sign") {
				running = false;
			}
		}

		PXCCapture::Sample* capture = session->QuerySample();
		PXCImage* depthImage = capture->depth;

		//std::this_thread::sleep_for(std::chrono::milliseconds(10));

		session->ReleaseFrame();
	}

	handConfig->Release();
	handData->Release();
	session->Release();

	return 0;
}
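
// A minimal alternative sketch, an assumption rather than part of the sample above:
// if only the "v_sign" exit gesture matters, the hand configuration can enable that
// single gesture instead of EnableAllGestures(), so QueryFiredGesturesNumber() only
// reports the one we poll for.
static void enableExitGestureOnly(PXCHandConfiguration* handConfig) {
	handConfig->DisableAllGestures();
	handConfig->EnableGesture(L"v_sign");
	handConfig->ApplyChanges();
}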
void Processor::Process(HWND dialogWindow) {

	// set startup mode
	PXCSenseManager* senseManager = session->CreateSenseManager();

	if (senseManager == NULL) {

		Utilities::SetStatus(dialogWindow, L"Failed to create an SDK SenseManager", statusPart);
		return;

	}

	/* Set Mode & Source */
	PXCCaptureManager* captureManager = senseManager->QueryCaptureManager();

	pxcStatus status = PXC_STATUS_NO_ERROR;

	if (Utilities::GetPlaybackState(dialogWindow)) {

		status = captureManager->SetFileName(m_rssdkFilename, false);
		captureManager->SetRealtime(true);

	}

	if (status < PXC_STATUS_NO_ERROR) {

		Utilities::SetStatus(dialogWindow, L"Failed to Set Record/Playback File", statusPart);
		return;

	}

	/* Set Module */
	senseManager->EnableFace();

	/* Initialize */
	Utilities::SetStatus(dialogWindow, L"Init Started", statusPart);

	PXCFaceModule* faceModule = senseManager->QueryFace();
	
	if (faceModule == NULL) {

		assert(faceModule);
		return;

	}

	PXCFaceConfiguration* config = faceModule->CreateActiveConfiguration();
	
	if (config == NULL) {

		assert(config);
		return;

	}

	// Enable Gaze Algo
	config->QueryGaze()->isEnabled = true;

	// set dominant eye
	if (dominant_eye) {
	
		PXCFaceData::GazeCalibData::DominantEye eye = (PXCFaceData::GazeCalibData::DominantEye)(dominant_eye - 1);
		config->QueryGaze()->SetDominantEye(eye);

	}

	// set tracking mode
	config->SetTrackingMode(PXCFaceConfiguration::TrackingModeType::FACE_MODE_COLOR_PLUS_DEPTH);
	config->ApplyChanges();

	// Load Calibration File
	bool need_calibration = true;

	if (isLoadCalibFile) {

		FILE* my_file;
		errno_t err;

		err = _wfopen_s(&my_file, m_CalibFilename, L"rb");

		if (!err && my_file) {

			if (calibBuffer == NULL) {

				calibBuffersize = config->QueryGaze()->QueryCalibDataSize();
				calibBuffer = new unsigned char[calibBuffersize];

			}

			fread(calibBuffer, sizeof(pxcBYTE), calibBuffersize, my_file);
			fclose(my_file);

			pxcStatus st = config->QueryGaze()->LoadCalibration(calibBuffer, calibBuffersize);

			if (st != PXC_STATUS_NO_ERROR) {

				// get save file name
				calib_status = LOAD_CALIBRATION_ERROR;
				need_calibration = false;
				PostMessage(dialogWindow, WM_COMMAND, ID_CALIB_DONE, 0);
				return;
				
			}

		}

		isLoadCalibFile = false;
		need_calibration = false;
		PostMessage(dialogWindow, WM_COMMAND, ID_CALIB_LOADED, 0);

	} else if (calibBuffer) {

		// load existing calib stored in memory
		config->QueryGaze()->LoadCalibration(calibBuffer, calibBuffersize);
		need_calibration = false;

	}

	// init sense manager
	if (senseManager->Init() < PXC_STATUS_NO_ERROR) {

		captureManager->FilterByStreamProfiles(NULL);

		if (senseManager->Init() < PXC_STATUS_NO_ERROR) {

			Utilities::SetStatus(dialogWindow, L"Init Failed", statusPart);
			PostMessage(dialogWindow, WM_COMMAND, ID_STOP, 0);
			return;

		}

	}

	PXCCapture::DeviceInfo info;
	senseManager->QueryCaptureManager()->QueryDevice()->QueryDeviceInfo(&info);

    CheckForDepthStream(senseManager, dialogWindow);
    AlertHandler alertHandler(dialogWindow);

	config->detection.isEnabled = true;
	config->landmarks.isEnabled = true;
	config->pose.isEnabled = true;
			
    config->EnableAllAlerts();
    config->SubscribeAlert(&alertHandler);

    config->ApplyChanges();

	

    Utilities::SetStatus(dialogWindow, L"Streaming", statusPart);
    m_output = faceModule->CreateOutput();

	int failed_counter = 0;

    bool isNotFirstFrame = false;
    bool isFinishedPlaying = false;
    bool activeapp = true;
    ResetEvent(renderer->GetRenderingFinishedSignal());

	renderer->SetSenseManager(senseManager);
    renderer->SetNumberOfLandmarks(config->landmarks.numLandmarks);
    renderer->SetCallback(renderer->SignalProcessor);

	// Creating PXCSmoother instance
	PXCSmoother* smoother = NULL;
	senseManager->QuerySession()->CreateImpl<PXCSmoother>(&smoother);

	// Creating 2D smoother with quadratic algorithm with smooth value
	PXCSmoother::Smoother2D* smoother2D = smoother->Create2DQuadratic(1.0f);

	// acquisition loop
    if (!isStopped) {

        while (true) {

			if (isPaused) { 
				// allow the application to pause for user input

				Sleep(200);
				continue;

			}

            if (senseManager->AcquireFrame(true) < PXC_STATUS_NO_ERROR) {

                isFinishedPlaying = true;

            }

            if (isNotFirstFrame) {

                WaitForSingleObject(renderer->GetRenderingFinishedSignal(), INFINITE);

            } else { 
				// enable back window

				if (need_calibration) EnableBackWindow();

			}

            if (isFinishedPlaying || isStopped) {

                if (isStopped) senseManager->ReleaseFrame();
                if (isFinishedPlaying) PostMessage(dialogWindow, WM_COMMAND, ID_STOP, 0);
                break;

            }

            m_output->Update();
			pxcI64 stamp =  m_output->QueryFrameTimestamp();
            PXCCapture::Sample* sample = senseManager->QueryFaceSample();
            isNotFirstFrame = true;

            if (sample != NULL) {

				DWORD dwWaitResult;
				dwWaitResult = WaitForSingleObject(g_hMutex,	INFINITE);
				
				if (dwWaitResult == WAIT_OBJECT_0) {

					// check calibration state
					if (need_calibration) {

						// CALIBRATION FLOW
						if (m_output->QueryNumberOfDetectedFaces()) {

							PXCFaceData::Face* trackedFace = m_output->QueryFaceByIndex(0);
							PXCFaceData::GazeCalibData* gazeData = trackedFace->QueryGazeCalibration();

							if (gazeData) { 

								// gaze enabled check calibration
								PXCFaceData::GazeCalibData::CalibrationState state = trackedFace->QueryGazeCalibration()->QueryCalibrationState();

								if (state == PXCFaceData::GazeCalibData::CALIBRATION_NEW_POINT) {

									// present new point for calibration
									PXCPointI32 new_point = trackedFace->QueryGazeCalibration()->QueryCalibPoint();
									
									// set the cursor to that point
									eye_point_x = new_point.x;
									eye_point_y = new_point.y;
									SetCursorPos(OUT_OF_SCREEN, OUT_OF_SCREEN);

								} else if (state == PXCFaceData::GazeCalibData::CALIBRATION_DONE) {

									// store calib data in a file
									calibBuffersize = trackedFace->QueryGazeCalibration()->QueryCalibDataSize();
									if (calibBuffer == NULL) calibBuffer = new unsigned char[calibBuffersize];
									calib_status = trackedFace->QueryGazeCalibration()->QueryCalibData(calibBuffer);
									dominant_eye = trackedFace->QueryGazeCalibration()->QueryCalibDominantEye();

									// get save file name
									PostMessage(dialogWindow, WM_COMMAND, ID_CALIB_DONE, 0);
									need_calibration = false;

								} else  if (state == PXCFaceData::GazeCalibData::CALIBRATION_IDLE) {

									// set the cursor beyond the screen
									eye_point_x = OUT_OF_SCREEN;
									eye_point_y = OUT_OF_SCREEN;
									SetCursorPos(OUT_OF_SCREEN, OUT_OF_SCREEN);

								}

							} else { 
								// gaze not enabled stop processing

								need_calibration = false;
								PostMessage(dialogWindow, WM_COMMAND, ID_STOP, 0);

							}

						} else {

							failed_counter++; 
							// wait 20 frames , if no detection happens go to failed mode

							if (failed_counter > NO_DETECTION_FOR_LONG) {

								calib_status = 3; // failed
								need_calibration = false;
								PostMessage(dialogWindow, WM_COMMAND, ID_CALIB_DONE, 0);

							}

						}

					} else {

						// GAZE PROCESSING AFTER CALIBRATION IS DONE
						if (m_output->QueryNumberOfDetectedFaces()) {

							PXCFaceData::Face* trackedFace = m_output->QueryFaceByIndex(0);
							
							// get gaze point
							if (trackedFace != NULL) {

								if (trackedFace->QueryGaze()) {
									
									PXCFaceData::GazePoint gaze_point = trackedFace->QueryGaze()->QueryGazePoint();

									PXCPointF32 new_point;
									
									new_point.x = (pxcF32)gaze_point.screenPoint.x;
									new_point.y = (pxcF32)gaze_point.screenPoint.y;
									
									// Smoothing
									PXCPointF32 smoothed2DPoint = smoother2D->SmoothValue(new_point);

									pxcF64 horizontal_angle = trackedFace->QueryGaze()->QueryGazeHorizontalAngle();
									pxcF64 vertical_angle = trackedFace->QueryGaze()->QueryGazeVerticalAngle();
									
									eye_horizontal_angle = (float)horizontal_angle;
									eye_vertical_angle = (float)vertical_angle;
									eye_point_x = (int)smoothed2DPoint.x;
									eye_point_y = (int)smoothed2DPoint.y;
								}

							}

						}

					}

					// render output
					renderer->DrawBitmap(sample);
					renderer->SetOutput(m_output);
					renderer->SignalRenderer();

					if (!ReleaseMutex(g_hMutex)) {
						
						throw std::exception("Failed to release mutex");

					}

				}

            }

            senseManager->ReleaseFrame();

        }

        m_output->Release();
        Utilities::SetStatus(dialogWindow, L"Stopped", statusPart);

    }

	config->Release();
	senseManager->Close(); 
	senseManager->Release();
}
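
// A hedged sketch (the helper name and its call site are assumptions, not shown in
// Process() above) of the save step that the calibration flow defers to the
// ID_CALIB_DONE handler: once QueryCalibData() has filled calibBuffer, the raw bytes
// are written to disk in the same form that the _wfopen_s/fread block above reads back.
static bool SaveCalibrationSketch(const wchar_t* path, const unsigned char* buffer, int size) {
	FILE* file = nullptr;
	if (_wfopen_s(&file, path, L"wb") != 0 || file == nullptr) return false;
	size_t written = fwrite(buffer, sizeof(unsigned char), (size_t)size, file);
	fclose(file);
	return written == (size_t)size;
}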