void RealsensePropGetListener::getValue(imaqkit::IPropInfo* propertyInfo, void* value)
{
	const char* propname = propertyInfo->getPropertyName();

	// Create and initialize a SenseManager just for this property query.
	PXCSenseManager* psm = PXCSenseManager::CreateInstance();
	if (!psm)
		return;
	psm->EnableStream(PXCCapture::STREAM_TYPE_COLOR, 640, 480);
	psm->Init();
	if (strcmp(propname, "color_brightness") == 0)
	{
		PXCCapture::Device *device = psm->QueryCaptureManager()->QueryDevice();
		pxcI32 brightness = device->QueryColorBrightness();
		*(reinterpret_cast<int*>(value)) = brightness;
	}
	else
	{
		assert(false && "Unhandled property. Add a handler for this new property.");
	}
	psm->Release();
}
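Creating, initializing, and releasing a full SenseManager on every property read is expensive. A minimal sketch of the same query against a device handle the adaptor keeps open; _parent and getCachedDevice() are hypothetical and not part of the code above:

// Hypothetical variant: _parent->getCachedDevice() stands in for a device handle
// the adaptor keeps open for the lifetime of the acquisition.
void RealsensePropGetListener::getValue(imaqkit::IPropInfo* propertyInfo, void* value)
{
	const char* propname = propertyInfo->getPropertyName();
	PXCCapture::Device* device = _parent->getCachedDevice(); // hypothetical accessor
	if (device && strcmp(propname, "color_brightness") == 0)
		*(reinterpret_cast<int*>(value)) = device->QueryColorBrightness();
	else
		assert(false && "Unhandled property. Add a handler for this new property.");
}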
DWORD WINAPI RealSenseAdaptor::acquireThread(void* param) {
	RealSenseAdaptor* adaptor = reinterpret_cast<RealSenseAdaptor*>(param);
	// Set the thread priority.
	SetThreadPriority(GetCurrentThread(), THREAD_PRIORITY_TIME_CRITICAL);
	MSG msg;
	while (GetMessage(&msg, NULL, 0, 0) > 0) {
		switch (msg.message) {
		case WM_USER:
		{
			// Check if a frame needs to be acquired
			std::unique_ptr<imaqkit::IAutoCriticalSection> driverSection(imaqkit::createAutoCriticalSection(adaptor->_driverGuard, false));
			// Get frame type & dimensions.

			imaqkit::frametypes::FRAMETYPE frameType =
				adaptor->getFrameType();
			int imWidth = adaptor->getMaxWidth();
			int imHeight = adaptor->getMaxHeight();
			int imBand = adaptor->getNumberOfBands();
			int camera_id = adaptor->getDeviceID();

			PXCSession *session = PXCSession_Create();
			if (!session) return 0;
			PXCSenseManager *psm = session->CreateSenseManager();
			if (!psm) {
				session->Release();
				return 0;
			}

			if (imBand == 3)
			{
				if (camera_id == 1)
				{
					PXCCapture::DeviceInfo dinfo = {};
					pxcCHAR *myname = L"Intel(R) RealSense(TM) 3D Camera";
					psm->QueryCaptureManager()->FilterByDeviceInfo(myname, dinfo.did, dinfo.didx);
					psm->EnableStream(PXCCapture::STREAM_TYPE_COLOR, imWidth, imHeight);

				}
				else
				{
					PXCCapture::DeviceInfo dinfo = {};
					pxcCHAR *myname = L"Intel(R) RealSense(TM) 3D Camera R200";
					psm->QueryCaptureManager()->FilterByDeviceInfo(myname, dinfo.did, dinfo.didx);
					psm->EnableStream(PXCCapture::STREAM_TYPE_COLOR, imWidth, imHeight, 30);
				}

			}
			else
			{
				if (camera_id == 2)
				{
					PXCCapture::DeviceInfo dinfo = {};
					pxcCHAR *myname = L"Intel(R) RealSense(TM) 3D Camera";
					psm->QueryCaptureManager()->FilterByDeviceInfo(myname, dinfo.did, dinfo.didx);
					psm->EnableStream(PXCCapture::STREAM_TYPE_DEPTH, imWidth, imHeight);


				}
				else
				{
					PXCCapture::DeviceInfo dinfo = {};
					pxcCHAR *myname = L"Intel(R) RealSense(TM) 3D Camera R200";
					psm->QueryCaptureManager()->FilterByDeviceInfo(myname, dinfo.did, dinfo.didx);
					psm->EnableStream(PXCCapture::STREAM_TYPE_DEPTH, imWidth, imHeight, 30);
					psm->EnableStream(PXCCapture::STREAM_TYPE_COLOR, 640, 480, 30);
				}

			}

			UtilRender *renderColor = new UtilRender(L"COLOR STREAM");
			UtilRender *renderDepth = new UtilRender(L"DEPTH STREAM");
			psm->Init();
			PXCImage *colorIm, *depthIm;

			while (adaptor->isAcquisitionNotComplete() && adaptor->isAcquisitionActive()) {
				driverSection->enter();
				if (psm->AcquireFrame(true) < PXC_STATUS_NO_ERROR) {
					driverSection->leave();
					break;
				}
				// Retrieve all available image samples for this frame.
				PXCCapture::Sample *sample = psm->QuerySample();
				if (!sample) {
					psm->ReleaseFrame();
					driverSection->leave();
					break;
				}
				unsigned char *imBuffer1 = new unsigned char[imWidth * imHeight * imBand];
				unsigned short *imBuffer2 = new unsigned short[imWidth * imHeight];
				if (imBand == 3)
				{
					colorIm = sample->color;
					if (!colorIm) {
						delete[] imBuffer1;
						delete[] imBuffer2;
						psm->ReleaseFrame();
						driverSection->leave();
						break;
					}
					PXCImage::ImageData cdata;
					pxcStatus sts = colorIm->AcquireAccess(PXCImage::ACCESS_READ, PXCImage::PIXEL_FORMAT_RGB32, &cdata);
					// RGB32 data is laid out as BGRA; reverse the channel order into packed RGB.
					for (int ix = 0; ix < imHeight; ix++)
					{
						for (int jx = 0; jx < imWidth; jx++)
						{
							for (int c = 0; c < imBand; c++)
							{
								imBuffer1[(ix * imWidth + jx) * imBand + imBand - 1 - c] = *(pxcBYTE*)(cdata.planes[0] + ix * cdata.pitches[0] + 4 * jx + c);
							}
						}
					}
					colorIm->ReleaseAccess(&cdata);
					renderColor->RenderFrame(colorIm);

				}
				else
				{
					depthIm = sample->depth;
					if (!depthIm) {
						delete[] imBuffer1;
						delete[] imBuffer2;
						psm->ReleaseFrame();
						driverSection->leave();
						break;
					}

					PXCImage::ImageData ddata;
					pxcStatus sts = depthIm->AcquireAccess(PXCImage::ACCESS_READ, PXCImage::PIXEL_FORMAT_DEPTH, &ddata);
					// Copy the 16-bit depth map row by row.
					for (int id = 0; id < imHeight; id++)
					{
						for (int jd = 0; jd < imWidth; jd++)
						{
							imBuffer2[id * imWidth + jd] = *(pxcU16*)(ddata.planes[0] + id * ddata.pitches[0] + jd * 2);
						}
					}
					depthIm->ReleaseAccess(&ddata);
					renderDepth->RenderFrame(depthIm);
			
				}

				if (adaptor->isSendFrame()) {
					// Create a frame object.
					imaqkit::IAdaptorFrame* frame =
						adaptor->getEngine()->makeFrame(frameType,
						imWidth,
						imHeight);
					// Copy data from buffer into frame object.
					if (imBand == 3)
					{
						frame->setImage(imBuffer1,
							imWidth,
							imHeight,
							0, // X Offset from origin
							0); // Y Offset from origin
					}
					else
					{
						frame->setImage(imBuffer2,
							imWidth,
							imHeight,
							0, // X Offset from origin
							0);
					}

					// Set image's timestamp.
					frame->setTime(imaqkit::getCurrentTime());
					// Send frame object to engine.
					adaptor->getEngine()->receiveFrame(frame);
				} // if isSendFrame()		
				psm->ReleaseFrame();
				// Increment the frame count.
				adaptor->incrementFrameCount();
				// Cleanup. Deallocate imBuffer.
				delete[] imBuffer1;
				delete[] imBuffer2;
				driverSection->leave();
			} // while(isAcquisitionNotComplete()
			delete renderColor;
			delete renderDepth;
			psm->Release();
			session->Release();
		}
		break;
		} //switch-case WM_USER
	}//while message is not WM_QUIT
	return 0;
}
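The loop above runs entirely inside the WM_USER handler, so the adaptor only needs to post that message once when acquisition starts. A minimal sketch of that trigger, assuming the adaptor kit's startCapture() override and a hypothetical _acquireThreadID member saved when the thread was created:

// Sketch only: post one WM_USER to wake acquireThread; the while-loop above
// then runs until the acquisition completes or is stopped.
bool RealSenseAdaptor::startCapture()
{
	PostThreadMessage(_acquireThreadID, WM_USER, 0, 0); // _acquireThreadID: hypothetical member
	return true;
}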
Example #3
void ITA_ForcesApp::setupRS()
{
	mHasHand = false;
	mIsRunning = false;

	mRS = PXCSenseManager::CreateInstance();
	if (mRS == nullptr)
	{
		console() << "Unable to Create SenseManager" << endl;
		return;
	}
	else
	{
		console() << "Created SenseManager" << endl;
		auto stat = mRS->EnableStream(PXCCapture::STREAM_TYPE_COLOR, RGB_SIZE.x, RGB_SIZE.y, 60);
		if (stat >= PXC_STATUS_NO_ERROR) {
			console() << "Color Stream Enabled" << endl;
		}
		stat = mRS->EnableStream(PXCCapture::STREAM_TYPE_DEPTH, Z_SIZE.x, Z_SIZE.y, 60);
		if (stat >= PXC_STATUS_NO_ERROR) {
			console() << "Depth Stream Enabled" << endl;
		}

		stat = mRS->EnableHand();

		if (stat >= PXC_STATUS_NO_ERROR)
		{
			stat = mRS->Init();
			auto handModule = mRS->QueryHand();

			if (stat >= PXC_STATUS_NO_ERROR && handModule != nullptr)
			{
				auto depthSize = mRS->QueryCaptureManager()->QueryImageSize(PXCCapture::STREAM_TYPE_DEPTH);
				console() << "DepthSize: " << to_string(depthSize.width) << " " << to_string(depthSize.height) << endl;

				auto colorSize = mRS->QueryCaptureManager()->QueryImageSize(PXCCapture::STREAM_TYPE_COLOR);
				console() << "ColorSize: " << to_string(colorSize.width) << " " << to_string(colorSize.height) << endl;

				mHandData = handModule->CreateOutput();
				auto cfg = handModule->CreateActiveConfiguration();
				if (cfg != nullptr)
				{
					stat = cfg->SetTrackingMode(PXCHandData::TRACKING_MODE_CURSOR);
					if (stat >= PXC_STATUS_NO_ERROR)
						stat = cfg->EnableAllAlerts();
					if (stat >= PXC_STATUS_NO_ERROR)
						stat = cfg->EnableAllGestures(false);
					if (stat >= PXC_STATUS_NO_ERROR)
						stat = cfg->ApplyChanges();
					if (stat >= PXC_STATUS_NO_ERROR)
						cfg->Update();
					if (stat >= PXC_STATUS_NO_ERROR)
					{
						console() << "Hand Tracking Enabled" << endl;
						mRS->QueryCaptureManager()->QueryDevice()->SetMirrorMode(PXCCapture::Device::MIRROR_MODE_HORIZONTAL);
						mIsRunning = true;
					}
					cfg->Release();
				}
				else {
					console() << "Unable to Configure Hand Tracking" << endl;
				}
				mMapper = mRS->QueryCaptureManager()->QueryDevice()->CreateProjection();
				if (mMapper == nullptr) {
					CI_LOG_W("Unable to get coordinate mapper");
				}
			}
			else
				console() << "Unable to Start SenseManager" << endl;
		}
	}
}
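setupRS() only configures and starts the pipeline; a per-frame loop still has to pump it. A minimal sketch of that loop using the members set up above (mRS, mHandData, mHasHand, mIsRunning); updateRS() is a hypothetical method name:

void ITA_ForcesApp::updateRS()
{
	if (!mIsRunning)
		return;

	// Block until all enabled streams (and the hand module) have data for this frame.
	if (mRS->AcquireFrame(true) < PXC_STATUS_NO_ERROR)
		return;

	if (mHandData != nullptr)
	{
		mHandData->Update();                            // refresh hand tracking results
		mHasHand = mHandData->QueryNumberOfHands() > 0;
	}

	mRS->ReleaseFrame();                                // let the pipeline advance
}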
Example #4
void Processor::Process(HWND dialogWindow) {

	// set startup mode
	PXCSenseManager* senseManager = session->CreateSenseManager();

	if (senseManager == NULL) {

		Utilities::SetStatus(dialogWindow, L"Failed to create an SDK SenseManager", statusPart);
		return;

	}

	/* Set Mode & Source */
	PXCCaptureManager* captureManager = senseManager->QueryCaptureManager();

	pxcStatus status = PXC_STATUS_NO_ERROR;

	if (Utilities::GetPlaybackState(dialogWindow)) {

		status = captureManager->SetFileName(m_rssdkFilename, false);
		captureManager->SetRealtime(true);

	}

	if (status < PXC_STATUS_NO_ERROR) {

		Utilities::SetStatus(dialogWindow, L"Failed to Set Record/Playback File", statusPart);
		return;

	}

	/* Set Module */
	senseManager->EnableFace();

	/* Initialize */
	Utilities::SetStatus(dialogWindow, L"Init Started", statusPart);

	PXCFaceModule* faceModule = senseManager->QueryFace();
	
	if (faceModule == NULL) {

		assert(faceModule);
		return;

	}

	PXCFaceConfiguration* config = faceModule->CreateActiveConfiguration();
	
	if (config == NULL) {

		assert(config);
		return;

	}

	// Enable Gaze Algo
	config->QueryGaze()->isEnabled = true;

	// set dominant eye
	if (dominant_eye) {
	
		PXCFaceData::GazeCalibData::DominantEye eye = (PXCFaceData::GazeCalibData::DominantEye)(dominant_eye - 1);
		config->QueryGaze()->SetDominantEye(eye);

	}

	// set tracking mode
	config->SetTrackingMode(PXCFaceConfiguration::TrackingModeType::FACE_MODE_COLOR_PLUS_DEPTH);
	config->ApplyChanges();

	// Load Calibration File
	bool need_calibration = true;

	if (isLoadCalibFile) {

		FILE* my_file;
		errno_t err;

		err = _wfopen_s(&my_file, m_CalibFilename, L"rb");

		if (!err && my_file) {

			if (calibBuffer == NULL) {

				calibBuffersize = config->QueryGaze()->QueryCalibDataSize();
				calibBuffer = new unsigned char[calibBuffersize];

			}

			fread(calibBuffer, sizeof(pxcBYTE), calibBuffersize, my_file);
			fclose(my_file);

			pxcStatus st = config->QueryGaze()->LoadCalibration(calibBuffer, calibBuffersize);

			if (st != PXC_STATUS_NO_ERROR) {

				// get save file name
				calib_status = LOAD_CALIBRATION_ERROR;
				need_calibration = false;
				PostMessage(dialogWindow, WM_COMMAND, ID_CALIB_DONE, 0);
				return;
				
			}

		}

		isLoadCalibFile = false;
		need_calibration = false;
		PostMessage(dialogWindow, WM_COMMAND, ID_CALIB_LOADED, 0);

	} else if (calibBuffer) {

		// load existing calib stored in memory
		config->QueryGaze()->LoadCalibration(calibBuffer, calibBuffersize);
		need_calibration = false;

	}

	// init sense manager
	if (senseManager->Init() < PXC_STATUS_NO_ERROR) {

		captureManager->FilterByStreamProfiles(NULL);

		if (senseManager->Init() < PXC_STATUS_NO_ERROR) {

			Utilities::SetStatus(dialogWindow, L"Init Failed", statusPart);
			PostMessage(dialogWindow, WM_COMMAND, ID_STOP, 0);
			return;

		}

	}

	PXCCapture::DeviceInfo info;
	senseManager->QueryCaptureManager()->QueryDevice()->QueryDeviceInfo(&info);

    CheckForDepthStream(senseManager, dialogWindow);
    AlertHandler alertHandler(dialogWindow);

	config->detection.isEnabled = true;
	config->landmarks.isEnabled = true;
	config->pose.isEnabled = true;
			
    config->EnableAllAlerts();
    config->SubscribeAlert(&alertHandler);

    config->ApplyChanges();

    Utilities::SetStatus(dialogWindow, L"Streaming", statusPart);
    m_output = faceModule->CreateOutput();

	int failed_counter = 0;

    bool isNotFirstFrame = false;
    bool isFinishedPlaying = false;
    bool activeapp = true;
    ResetEvent(renderer->GetRenderingFinishedSignal());

	renderer->SetSenseManager(senseManager);
    renderer->SetNumberOfLandmarks(config->landmarks.numLandmarks);
    renderer->SetCallback(renderer->SignalProcessor);

	// Creating PXCSmoother instance
	PXCSmoother* smoother = NULL;
	senseManager->QuerySession()->CreateImpl<PXCSmoother>(&smoother);

	// Creating 2D smoother with quadratic algorithm with smooth value
	PXCSmoother::Smoother2D* smoother2D = smoother->Create2DQuadratic(1.0f);

	// acquisition loop
    if (!isStopped) {

        while (true) {

			if (isPaused) { 
				// allow the application to pause for user input

				Sleep(200);
				continue;

			}

            if (senseManager->AcquireFrame(true) < PXC_STATUS_NO_ERROR) {

                isFinishedPlaying = true;

            }

            if (isNotFirstFrame) {

                WaitForSingleObject(renderer->GetRenderingFinishedSignal(), INFINITE);

            } else { 
				// enable back window

				if (need_calibration) EnableBackWindow();

			}

            if (isFinishedPlaying || isStopped) {

                if (isStopped) senseManager->ReleaseFrame();
                if (isFinishedPlaying) PostMessage(dialogWindow, WM_COMMAND, ID_STOP, 0);
                break;

            }

            m_output->Update();
			pxcI64 stamp =  m_output->QueryFrameTimestamp();
            PXCCapture::Sample* sample = senseManager->QueryFaceSample();
            isNotFirstFrame = true;

            if (sample != NULL) {

				DWORD dwWaitResult;
				dwWaitResult = WaitForSingleObject(g_hMutex, INFINITE);
				
				if (dwWaitResult == WAIT_OBJECT_0) {

					// check calibration state
					if (need_calibration) {

						// CALIBRATION FLOW
						if (m_output->QueryNumberOfDetectedFaces()) {

							PXCFaceData::Face* trackedFace = m_output->QueryFaceByIndex(0);
							PXCFaceData::GazeCalibData* gazeData = trackedFace->QueryGazeCalibration();

							if (gazeData) { 

								// gaze enabled check calibration
								PXCFaceData::GazeCalibData::CalibrationState state = trackedFace->QueryGazeCalibration()->QueryCalibrationState();

								if (state == PXCFaceData::GazeCalibData::CALIBRATION_NEW_POINT) {

									// present new point for calibration
									PXCPointI32 new_point = trackedFace->QueryGazeCalibration()->QueryCalibPoint();
									
									// record the target point and park the real cursor off-screen
									eye_point_x = new_point.x;
									eye_point_y = new_point.y;
									SetCursorPos(OUT_OF_SCREEN, OUT_OF_SCREEN);

								} else if (state == PXCFaceData::GazeCalibData::CALIBRATION_DONE) {

									// store calib data in a file
									calibBuffersize = trackedFace->QueryGazeCalibration()->QueryCalibDataSize();
									if (calibBuffer == NULL) calibBuffer = new unsigned char[calibBuffersize];
									calib_status = trackedFace->QueryGazeCalibration()->QueryCalibData(calibBuffer);
									dominant_eye = trackedFace->QueryGazeCalibration()->QueryCalibDominantEye();

									// get save file name
									PostMessage(dialogWindow, WM_COMMAND, ID_CALIB_DONE, 0);
									need_calibration = false;

								} else  if (state == PXCFaceData::GazeCalibData::CALIBRATION_IDLE) {

									// set the cursor beyond the screen
									eye_point_x = OUT_OF_SCREEN;
									eye_point_y = OUT_OF_SCREEN;
									SetCursorPos(OUT_OF_SCREEN, OUT_OF_SCREEN);

								}

							} else { 
								// gaze not enabled stop processing

								need_calibration = false;
								PostMessage(dialogWindow, WM_COMMAND, ID_STOP, 0);

							}

						} else {

							failed_counter++; 
							// wait 20 frames , if no detection happens go to failed mode

							if (failed_counter > NO_DETECTION_FOR_LONG) {

								calib_status = 3; // failed
								need_calibration = false;
								PostMessage(dialogWindow, WM_COMMAND, ID_CALIB_DONE, 0);

							}

						}

					} else {

						// GAZE PROCESSING AFTER CALIBRATION IS DONE
						if (m_output->QueryNumberOfDetectedFaces()) {

							PXCFaceData::Face* trackedFace = m_output->QueryFaceByIndex(0);
							
							// get gaze point
							if (trackedFace != NULL) {

								if (trackedFace->QueryGaze()) {
									
									PXCFaceData::GazePoint gaze_point = trackedFace->QueryGaze()->QueryGazePoint();

									PXCPointF32 new_point;
									
									new_point.x = (pxcF32)gaze_point.screenPoint.x;
									new_point.y = (pxcF32)gaze_point.screenPoint.y;
									
									// Smoothing
									PXCPointF32 smoothed2DPoint = smoother2D->SmoothValue(new_point);

									pxcF64 horizontal_angle = trackedFace->QueryGaze()->QueryGazeHorizontalAngle();
									pxcF64 vertical_angle = trackedFace->QueryGaze()->QueryGazeVerticalAngle();
									
									eye_horizontal_angle = (float)horizontal_angle;
									eye_vertical_angle = (float)vertical_angle;
									eye_point_x = (int)smoothed2DPoint.x;
									eye_point_y = (int)smoothed2DPoint.y;
								}

							}

						}

					}

					// render output
					renderer->DrawBitmap(sample);
					renderer->SetOutput(m_output);
					renderer->SignalRenderer();

					if (!ReleaseMutex(g_hMutex)) {

						throw std::exception("Failed to release mutex");

					}

				}

            }

            senseManager->ReleaseFrame();

        }

        m_output->Release();
        Utilities::SetStatus(dialogWindow, L"Stopped", statusPart);

    }

	if (smoother2D) smoother2D->Release();
	if (smoother) smoother->Release();
	config->Release();
	senseManager->Close();
	senseManager->Release();
}
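The calibration flow above only posts ID_CALIB_DONE once QueryCalibData() has filled calibBuffer; persisting that buffer (the counterpart of the _wfopen_s/fread load earlier) could look like the following, with SaveCalibration as a hypothetical helper:

// Hypothetical helper: write the calibration blob so the fread() path above
// can restore it on the next run.
void Processor::SaveCalibration(const wchar_t* fileName) {

	if (calibBuffer == NULL || calibBuffersize == 0) return;

	FILE* my_file = NULL;
	errno_t err = _wfopen_s(&my_file, fileName, L"wb");

	if (!err && my_file) {

		fwrite(calibBuffer, sizeof(pxcBYTE), calibBuffersize, my_file);
		fclose(my_file);

	}

}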