void KinectDevice::connect(int streams) { if (_streams == Streams::NO_STREAM) { _streams = streams; HRESULT hr = GetDefaultKinectSensor(&_sensor); if (hr != S_OK) throw std::exception("Cannot find Kinect Sensor"); else { hr = _sensor->Open(); } if (hr != S_OK) throw std::exception("Cannot open connection"); else { DWORD srcTypes = 0; if (_streams & Streams::DEPTH_STREAM) srcTypes |= FrameSourceTypes_Depth; if (_streams & Streams::COLOR_STREAM) srcTypes |= FrameSourceTypes_Color; if (_streams & Streams::IR_STREAM) srcTypes |= FrameSourceTypes_Infrared; if (_streams & Streams::HDIR_STREAM) srcTypes |= FrameSourceTypes_LongExposureInfrared; if (_streams & Streams::INDEX_STREAM) srcTypes |= FrameSourceTypes_BodyIndex; hr = _sensor->OpenMultiSourceFrameReader(srcTypes, &_reader); } if (hr != S_OK) throw std::exception("Cannot open frame reader"); else { hr = _reader->SubscribeMultiSourceFrameArrived(&_frameEvent); } if (hr != S_OK) throw std::exception("Cannot subscribe frame event listener"); getFrameInformation(); } else if (_sensor && _reader) { // Change the streams if (_listenThread.joinable()) { _listening = false; _listenThread.join(); } _reader->UnsubscribeMultiSourceFrameArrived(_frameEvent); _reader->Release(); _streams = streams; DWORD srcTypes = 0; if (_streams & Streams::DEPTH_STREAM) srcTypes |= FrameSourceTypes_Depth; if (_streams & Streams::COLOR_STREAM) srcTypes |= FrameSourceTypes_Color; if (_streams & Streams::IR_STREAM) srcTypes |= FrameSourceTypes_Infrared; if (_streams & Streams::HDIR_STREAM) srcTypes |= FrameSourceTypes_LongExposureInfrared; if (_streams & Streams::INDEX_STREAM) srcTypes |= FrameSourceTypes_BodyIndex; HRESULT hr = _sensor->OpenMultiSourceFrameReader(srcTypes, &_reader); if (hr != S_OK) throw std::exception("Cannot open frame reader"); else { hr = _reader->SubscribeMultiSourceFrameArrived(&_frameEvent); } if (hr != S_OK) throw std::exception("Cannot subscribe frame event listener"); } else throw std::exception("Attempted to connect 
to device after initialization failure"); }
// Constructs the grabber: clears all capture-state flags, acquires and opens
// the default Kinect sensor, opens a multi-source frame reader (depth-only or
// depth+color+body-index), allocates the color/coordinate conversion buffers,
// and wires up the PCL-style callback signals.
// NOTE(review): `instance` and `num` are unused in this body -- presumably kept
// for interface parity with another grabber; confirm before removing.
Microsoft2Grabber::Microsoft2Grabber(const int instance, bool depthOnly) {
	HRESULT hr;
	int num = 0;
	// Start with every capture flag cleared; streams are marked started below.
	m_person = m_depthStarted = m_videoStarted = m_audioStarted = m_infraredStarted = false;
	hStopEvent = NULL;
	hKinectThread = NULL;
	m_largeCloud = false;
	hr = GetDefaultKinectSensor(&m_pKinectSensor);
	if (FAILED(hr)) {
		throw exception("Error could not get default kinect sensor");
	}
	if (m_pKinectSensor) {
		// NOTE(review): this hr is immediately overwritten by Open(), so a
		// failure of get_CoordinateMapper is silently ignored here.
		hr = m_pKinectSensor->get_CoordinateMapper(&m_pCoordinateMapper);
		hr = m_pKinectSensor->Open();
		if (SUCCEEDED(hr)) {
			if (depthOnly) {
				// Color and body-index sources intentionally left commented out.
				hr = m_pKinectSensor->OpenMultiSourceFrameReader(
					FrameSourceTypes::FrameSourceTypes_Depth, // | FrameSourceTypes::FrameSourceTypes_Color | FrameSourceTypes::FrameSourceTypes_BodyIndex,
					&m_pMultiSourceFrameReader);
			} else {
				hr = m_pKinectSensor->OpenMultiSourceFrameReader(
					FrameSourceTypes::FrameSourceTypes_Depth | FrameSourceTypes::FrameSourceTypes_Color | FrameSourceTypes::FrameSourceTypes_BodyIndex,
					&m_pMultiSourceFrameReader);
			}
			if (SUCCEEDED(hr)) {
				m_videoStarted = m_depthStarted = true;
			} else
				throw exception("Failed to Open Kinect Multisource Stream");
		}
	}
	if (!m_pKinectSensor || FAILED(hr)) {
		throw exception("No ready Kinect found");
	}
	// Frame geometry and scratch buffers for color<->depth<->camera mapping.
	m_colorSize = Size(cColorWidth, cColorHeight);
	m_depthSize = Size(cDepthWidth, cDepthHeight);
	m_pColorRGBX = new RGBQUAD[cColorWidth * cColorHeight];
	m_pColorCoordinates = new ColorSpacePoint[cDepthHeight * cDepthWidth];
	m_pCameraSpacePoints = new CameraSpacePoint[cColorHeight * cColorWidth];
	// create callback signals
	image_signal_ = createSignal<sig_cb_microsoft_image> ();
	depth_image_signal_ = createSignal<sig_cb_microsoft_depth_image> ();
	image_depth_image_signal_ = createSignal<sig_cb_microsoft_image_depth_image> ();
	point_cloud_rgba_signal_ = createSignal<sig_cb_microsoft_point_cloud_rgba> ();
	all_data_signal_ = createSignal<sig_cb_microsoft_all_data> ();
	/*ir_image_signal_ = createSignal<sig_cb_microsoft_ir_image> ();
	point_cloud_signal_ = createSignal<sig_cb_microsoft_point_cloud> ();
	point_cloud_i_signal_ = createSignal<sig_cb_microsoft_point_cloud_i> ();
	point_cloud_rgb_signal_ = createSignal<sig_cb_microsoft_point_cloud_rgb> (); */
	if (!depthOnly) {
		// Pair up RGB and depth frames into a single synchronized callback.
		rgb_sync_.addCallback (boost::bind (&Microsoft2Grabber::imageDepthImageCallback, this, _1, _2));
	}
}
// Acquire the default Kinect sensor, open it, and create a depth frame reader.
// Returns S_OK on success; otherwise the failing HRESULT or E_FAIL.
HRESULT capKinect::initKinect() {
	HRESULT result = GetDefaultKinectSensor(&m_pKinect);
	if (FAILED(result)) {
		return result;
	}
	if (m_pKinect) {
		// Chain: open sensor -> fetch depth source -> open its reader.
		IDepthFrameSource *depthSource = NULL;
		result = m_pKinect->Open();
		if (SUCCEEDED(result)) {
			result = m_pKinect->get_DepthFrameSource(&depthSource);
		}
		if (SUCCEEDED(result)) {
			result = depthSource->OpenReader(&m_pDepthReader);
		}
		// The source is only needed to open the reader.
		if (depthSource) {
			SafeRelease(depthSource);
		}
	}
	if (!m_pKinect || FAILED(result)) {
		printf("No ready Kinect found!\n");
		return E_FAIL;
	}
	return result;
}
void init() { HRESULT hr; IKinectSensor* sensor; hr = GetDefaultKinectSensor( &sensor ); Assert( hr ); sensor_.reset( sensor ); hr = sensor_->Open(); Assert( hr ); // Sensor -> Body Source IBodyFrameSource* bodySource; hr = sensor_->get_BodyFrameSource( &bodySource ); Assert( hr ); bodySource_.reset( bodySource ); // Body Source -> Body Reader IBodyFrameReader* bodyReader; hr = bodySource_->OpenReader( &bodyReader ); Assert( hr ); bodyReader_.reset( bodyReader ); // Coordinate mapper ICoordinateMapper* mapper; hr = sensor_->get_CoordinateMapper( &mapper ); Assert( hr ); coordMapper_.reset( mapper ); }
//-------------------------------------------------------------- bool ofApp::initKinect() { //senspr HRESULT hResult = S_OK; hResult = GetDefaultKinectSensor(&pSensor); if (FAILED(hResult)) { std::cerr << "Error : GetDefaultKinectSensor" << std::endl; return -1; } hResult = pSensor->Open(); if (FAILED(hResult)){ std::cerr << "Error : IKinectSensor::Open()" << std::endl; return -1; } //color hResult = pSensor->get_ColorFrameSource(&pColorSource); if (FAILED(hResult)){ std::cerr << "Error : IKinectSensor::get_ColorFrameSource()" << std::endl; return -1; } hResult = pColorSource->OpenReader(&pColorReader); if (FAILED(hResult)){ std::cerr << "Error : IColorFrameSource::OpenReader()" << std::endl; return -1; } hResult = pColorSource->CreateFrameDescription(ColorImageFormat::ColorImageFormat_Rgba, &colorDescription); if (FAILED(hResult)){ std::cerr << "Error : IColorFrameSource::get_FrameDescription()" << std::endl; return -1; } colorDescription->get_Width(&colorWidth); colorDescription->get_Height(&colorHeight); colorDescription->get_BytesPerPixel(&colorBytesPerPixels); //body hResult = pSensor -> get_BodyFrameSource(&pBodySource); if( FAILED( hResult ) ){ std::cerr << "Error : IKinectSensor::get_BodyFrameSource()" << std::endl; return -1; } hResult = pBodySource->OpenReader( &pBodyReader ); if( FAILED( hResult ) ){ std::cerr << "Error : IBodyFrameSource::OpenReader()" << std::endl; return -1; } //mapper hResult = pSensor->get_CoordinateMapper( &pCoordinateMapper ); if( FAILED( hResult ) ){ std::cerr << "Error : IKinectSensor::get_CoordinateMapper()" << std::endl; return -1; } return true; }
// コンストラクタ KinectV2::KinectV2() { // センサを取得する if (sensor == NULL && GetDefaultKinectSensor(&sensor) == S_OK) { HRESULT hr; // センサの使用を開始する hr = sensor->Open(); assert(hr == S_OK); // デプスデータの読み込み設定 IDepthFrameSource *depthSource; hr = sensor->get_DepthFrameSource(&depthSource); assert(hr == S_OK); hr = depthSource->OpenReader(&depthReader); assert(hr == S_OK); IFrameDescription *depthDescription; hr = depthSource->get_FrameDescription(&depthDescription); assert(hr == S_OK); depthSource->Release(); // デプスデータのサイズを得る depthDescription->get_Width(&depthWidth); depthDescription->get_Height(&depthHeight); depthDescription->Release(); // カラーデータの読み込み設定 IColorFrameSource *colorSource; hr = sensor->get_ColorFrameSource(&colorSource); assert(hr == S_OK); hr = colorSource->OpenReader(&colorReader); assert(hr == S_OK); IFrameDescription *colorDescription; hr = colorSource->get_FrameDescription(&colorDescription); assert(hr == S_OK); colorSource->Release(); // カラーデータのサイズを得る colorDescription->get_Width(&colorWidth); colorDescription->get_Height(&colorHeight); colorDescription->Release(); // 座標のマッピング hr = sensor->get_CoordinateMapper(&coordinateMapper); assert(hr == S_OK); // depthCount と colorCount を計算してテクスチャとバッファオブジェクトを作成する makeTexture(); // デプスデータからカメラ座標を求めるときに用いる一時メモリを確保する position = new GLfloat[depthCount][3]; // カラーデータを変換する用いる一時メモリを確保する color = new GLubyte[colorCount * 4]; } }
/// <summary> /// Initializes the default Kinect sensor /// </summary> /// <returns>indicates success or failure</returns> HRESULT CColorBasics::InitializeDefaultSensor() { HRESULT hr; hr = GetDefaultKinectSensor(&m_pKinectSensor); if (FAILED(hr)) { return hr; } if (m_pKinectSensor) { // Initialize the Kinect and get the color reader IColorFrameSource* pColorFrameSource = NULL; IBodyFrameSource* pBodyFrameSource = NULL; hr = m_pKinectSensor->Open(); if (SUCCEEDED(hr)) { hr = m_pKinectSensor->get_ColorFrameSource(&pColorFrameSource); } if (SUCCEEDED(hr)) { hr = pColorFrameSource->OpenReader(&m_pColorFrameReader); } // Body if (SUCCEEDED(hr)) { hr = m_pKinectSensor->get_CoordinateMapper(&m_pCoordinateMapper); } if (SUCCEEDED(hr)) { hr = m_pKinectSensor->get_BodyFrameSource(&pBodyFrameSource); } if (SUCCEEDED(hr)) { hr = pBodyFrameSource->OpenReader(&m_pBodyFrameReader); } SafeRelease(pBodyFrameSource); SafeRelease(pColorFrameSource); } if (!m_pKinectSensor || FAILED(hr)) { SetStatusMessage(L"No ready Kinect found!", 10000, true); return E_FAIL; } return hr; }
// Opens the default Kinect sensor and prepares the coordinate mapper plus
// the body and color frame readers.
// Returns S_OK on success, E_FAIL (after logging) otherwise.
HRESULT MyKinect2::InitializeDefaultSensor() {
	HRESULT hr = GetDefaultKinectSensor(&m_pKinectSensor);
	if (FAILED(hr)) {
		return hr;
	}

	if (m_pKinectSensor) {
		IColorFrameSource* colorSource = NULL;
		IBodyFrameSource* bodySource = NULL;

		// Each step runs only while the previous one succeeded.
		hr = m_pKinectSensor->Open();
		if (SUCCEEDED(hr)) hr = m_pKinectSensor->get_CoordinateMapper(&m_pCoordinateMapper);
		if (SUCCEEDED(hr)) hr = m_pKinectSensor->get_BodyFrameSource(&bodySource);
		if (SUCCEEDED(hr)) hr = bodySource->OpenReader(&m_pBodyFrameReader);
		if (SUCCEEDED(hr)) hr = m_pKinectSensor->get_ColorFrameSource(&colorSource);
		if (SUCCEEDED(hr)) hr = colorSource->OpenReader(&m_pColorFrameReader);

		// Sources are only needed to open the readers.
		SafeRelease(bodySource);
		SafeRelease(colorSource);
	}

	if (!m_pKinectSensor || FAILED(hr)) {
		qDebug("Kinect2 initialization failed 1.");
		return E_FAIL;
	}
	return hr;
}
// Opens the default Kinect sensor, fetches the coordinate mapper, and opens
// one multi-source reader covering body/body-index/color/depth/infrared.
// Returns S_OK on success, E_FAIL otherwise.
HRESULT MyKinect::InitKinect() {
	HRESULT hr;
	hr = GetDefaultKinectSensor(&m_pKinectSensor);
	if (FAILED(hr)) {
		return hr;
	}
	if (m_pKinectSensor) {
		// Initialize the Kinect and get the color reader
		hr = m_pKinectSensor->Open();
		if (SUCCEEDED(hr)) {// get the coordinate mapper; this could also be done before Open()
			hr = m_pKinectSensor->get_CoordinateMapper(&m_pCoordinateMapper);
		}
		if (SUCCEEDED(hr)) {// note: if any one of the requested sources cannot start, the whole call fails
			hr = m_pKinectSensor->OpenMultiSourceFrameReader(//FrameSourceTypes::FrameSourceTypes_Audio|
				FrameSourceTypes::FrameSourceTypes_Body| FrameSourceTypes::FrameSourceTypes_BodyIndex|FrameSourceTypes::FrameSourceTypes_Color|
				FrameSourceTypes::FrameSourceTypes_Depth|FrameSourceTypes::FrameSourceTypes_Infrared,
				&m_pMultiSourceFrameReader);
		}
		// The multi-source path has no per-stream sources: open the multi
		// reader first, then fetch each frame from that reader --
		// so there are no sources to release here.
		/* Audio is added differently from the above:
		it is also source -> reader -> then subscribe to events.
		// Get the audio source (AudioSource)
		if (SUCCEEDED(hr)){ hr = m_pKinectSensor->get_AudioSource(&m_pAudioSource); }
		// Then get the audio beam frame reader
		if (SUCCEEDED(hr)){ hr = m_pAudioSource->OpenReader(&m_pAudioBeamFrameReader); }
		m_pAudioBeamFrameReader->AcquireLatestBeamFrames();*/
	}
	if (!m_pKinectSensor || FAILED(hr)) {
		printf("No ready Kinect found! \n");
		return E_FAIL;
	}
	return hr;
}
// Stops the Kinect: closes the sensor and releases the frame reader.
// FIX: the original called reader->Release() with no null check (crash if
// the reader was never opened) and left the pointer dangling, so a second
// stop() would double-Release.
// NOTE(review): re-acquiring the default sensor here overwrites whatever
// interface pointer `sensor` already held -- confirm the member's ownership
// semantics; kept as-is to preserve existing behavior.
void MKinect::stop(){
	if (FAILED(GetDefaultKinectSensor(&sensor))) {
		return;
	}
	if (!sensor) {
		return;
	}
	sensor->Close();
	if (reader) {           // guard: reader may never have been opened
		reader->Release();
		reader = nullptr;   // prevent double-Release on a later stop()
	}
}
// Initialize Sensor inline void Kinect::initializeSensor() { // Open Sensor ERROR_CHECK( GetDefaultKinectSensor( &kinect ) ); ERROR_CHECK( kinect->Open() ); // Check Open BOOLEAN isOpen = FALSE; ERROR_CHECK( kinect->get_IsOpen( &isOpen ) ); if( !isOpen ){ throw std::runtime_error( "failed IKinectSensor::get_IsOpen( &isOpen )" ); } }
//---------- void Device::open() { try { if (FAILED(GetDefaultKinectSensor(&this->sensor))) { throw(Exception("Failed to find sensor")); } if (FAILED(this->sensor->Open())) { throw(Exception("Failed to open sensor")); } } catch (std::exception & e) { OFXKINECTFORWINDOWS2_ERROR << e.what(); this->sensor = nullptr; } }
void kinectInit() { ERROR_CHECK(GetDefaultKinectSensor(&kinect)); ERROR_CHECK(kinect->get_CoordinateMapper(&coordinateMapper)); ERROR_CHECK(kinect->Open()); ERROR_CHECK(kinect->OpenMultiSourceFrameReader( FrameSourceTypes::FrameSourceTypes_Depth | FrameSourceTypes::FrameSourceTypes_Color | FrameSourceTypes::FrameSourceTypes_BodyIndex, &multiFrameReader)); for (int i = 0; i < dPixels; i++) { Sum[i] = 0; } //移動加算バッファを0クリア for (int i = 0; i < dPixels; i++) { depthBuffer[i] = 0; } //移動加算バッファを0クリア for (int i = 0; i < dPixels * nFrame; i++) { nDepthBuffer[i] = 0; } //0クリア }
bool KinectCapture::Initialize() { HRESULT hr; hr = GetDefaultKinectSensor(&pKinectSensor); if (FAILED(hr)) { bInitialized = false; return bInitialized; } if (pKinectSensor) { pKinectSensor->get_CoordinateMapper(&pCoordinateMapper); hr = pKinectSensor->Open(); if (SUCCEEDED(hr)) { pKinectSensor->OpenMultiSourceFrameReader(FrameSourceTypes::FrameSourceTypes_Color | FrameSourceTypes::FrameSourceTypes_Depth | FrameSourceTypes::FrameSourceTypes_Body | FrameSourceTypes::FrameSourceTypes_BodyIndex, &pMultiSourceFrameReader); } } bInitialized = SUCCEEDED(hr); if (bInitialized) { std::chrono::time_point<std::chrono::system_clock> start = std::chrono::system_clock::now(); bool bTemp; do { bTemp = AcquireFrame(); std::chrono::duration<double> elapsedSeconds = std::chrono::system_clock::now() - start; if (elapsedSeconds.count() > 5.0) { bInitialized = false; break; } } while (!bTemp); } return bInitialized; }
int KinectSensor::initialize() { /*Sensor*/ hResult = S_OK; hResult = GetDefaultKinectSensor(&pSensor); if (FAILED(hResult)){ std::cerr << "Error:GetDefaultKinectSensor" << std::endl; return 0; } hResult = pSensor->Open(); if (FAILED(hResult)){ std::cerr << "Error:Open()" << std::endl; return 0; } /*color and depth*/ //Source hResult = pSensor->get_DepthFrameSource(&pDepthSource); if (FAILED(hResult)){ std::cerr << "Error:get_DepthFrameSource()" << std::endl; return 0; } hResult = pSensor->get_ColorFrameSource(&pColorSource); if (FAILED(hResult)){ std::cerr << "Error:get_ColorFrameSource()" << std::endl; return 0; } //Reader hResult = pDepthSource->OpenReader(&pDepthReader); // if (FAILED(hResult)){ std::cerr << "Error : IDepthFrameSource::OpenReader()" << std::endl; return 0; } hResult = pColorSource->OpenReader(&pColorReader); // if (FAILED(hResult)){ std::cerr << "Error : IColorFrameSource::OpenReader()" << std::endl; return 0; } /*座標変換用*/ HRESULT hResult = pSensor->get_CoordinateMapper(&pCoordinateMapper); if (FAILED(hResult)) { std::cerr << "ERROR" << std::endl; return 0; } return 1; }
int TextureManager::initializeDefaultSensor(char *errorMessage) { HRESULT hr; sprintf_s(errorMessage, MAX_ERROR_LENGTH, ""); hr = GetDefaultKinectSensor(&kinect_sensor_); if (FAILED(hr)) { sprintf_s(errorMessage, MAX_ERROR_LENGTH, "Could not initialize Kinect"); kinect_sensor_ = NULL; return -1; } if (kinect_sensor_) { // Initialize the Kinect and get the depth reader IDepthFrameSource* depthFrameSource = NULL; hr = kinect_sensor_->Open(); if (SUCCEEDED(hr)) { hr = kinect_sensor_->get_DepthFrameSource(&depthFrameSource); } if (SUCCEEDED(hr)) { hr = depthFrameSource->OpenReader(&depth_frame_reader_); } if (depthFrameSource) { depthFrameSource->Release(); depthFrameSource = NULL; } } if (!kinect_sensor_ || FAILED(hr)) { sprintf_s(errorMessage, MAX_ERROR_LENGTH, "Could not open kinect depth reader"); return -1; } return 0; }
bool initKinect() { if (FAILED(GetDefaultKinectSensor(&sensor))) { return false; } if (sensor) { sensor->Open(); IColorFrameSource* framesource = NULL; sensor->get_ColorFrameSource(&framesource); framesource->OpenReader(&reader); if (framesource) { framesource->Release(); framesource = NULL; } return true; } else { return false; } }
void initialize() { // Sensorを取得 ERROR_CHECK( GetDefaultKinectSensor( &kinect ) ); ERROR_CHECK( kinect->Open() ); BOOLEAN isOpen; ERROR_CHECK( kinect->get_IsOpen( &isOpen ) ); if( !isOpen ){ throw std::runtime_error( "failed IKinectSensor::get_IsOpen( &isOpen )" ); } ERROR_CHECK( kinect->get_CoordinateMapper( &coordinateMapper ) ); // Color Frame Sourceを取得、Color Frame Readerを開く CComPtr<IColorFrameSource> colorFrameSource; ERROR_CHECK( kinect->get_ColorFrameSource( &colorFrameSource ) ); ERROR_CHECK( colorFrameSource->OpenReader( &colorFrameReader ) ); CComPtr<IFrameDescription> colorFrameDescription; ERROR_CHECK( colorFrameSource->CreateFrameDescription( colorFormat, &colorFrameDescription ) ); ERROR_CHECK( colorFrameDescription->get_Width( &colorWidth ) ); ERROR_CHECK( colorFrameDescription->get_Height( &colorHeight ) ); ERROR_CHECK( colorFrameDescription->get_BytesPerPixel( &colorBytesPerPixel ) ); colorBuffer.resize( colorWidth * colorHeight * colorBytesPerPixel ); // Depth Frame Sourceを取得、Depth Frame Readerを開く CComPtr<IDepthFrameSource> depthFrameSource; ERROR_CHECK( kinect->get_DepthFrameSource( &depthFrameSource ) ); ERROR_CHECK( depthFrameSource->OpenReader( &depthFrameReader ) ); CComPtr<IFrameDescription> depthFrameDescription; ERROR_CHECK( depthFrameSource->get_FrameDescription( &depthFrameDescription ) ); ERROR_CHECK( depthFrameDescription->get_Width( &depthWidth ) ); ERROR_CHECK( depthFrameDescription->get_Height( &depthHeight ) ); ERROR_CHECK( depthFrameDescription->get_BytesPerPixel( &depthBytesPerPixel ) ); depthBuffer.resize( depthWidth * depthHeight ); // Fusionの初期化 initializeFusion(); }
// Opens the default Kinect sensor and prepares the coordinate mapper and
// the body frame reader. Returns S_OK on success, E_FAIL otherwise.
HRESULT BreathingClass::InitializeDefaultSensor() {
	HRESULT hr = GetDefaultKinectSensor(&m_pKinectSensor);
	if (FAILED(hr)) {
		return hr;
	}

	if (m_pKinectSensor) {
		// Open the sensor, then chain mapper -> body source -> body reader.
		IBodyFrameSource* bodySource = NULL;
		hr = m_pKinectSensor->Open();
		if (SUCCEEDED(hr)) hr = m_pKinectSensor->get_CoordinateMapper(&m_pCoordinateMapper);
		if (SUCCEEDED(hr)) hr = m_pKinectSensor->get_BodyFrameSource(&bodySource);
		if (SUCCEEDED(hr)) hr = bodySource->OpenReader(&m_pBodyFrameReader);
		// The source is only needed to open the reader.
		SafeRelease(bodySource);
	}

	return (!m_pKinectSensor || FAILED(hr)) ? E_FAIL : hr;
}
// Constructor: acquire and open the default Kinect sensor, then reset the
// cached frame geometry and frame pointers.
Kinectv2::Kinectv2() {
	hResult = GetDefaultKinectSensor(&pSensor);
	if (FAILED(hResult)) {
		Error_check("GetDefaultKinectSensor");
	}
	hResult = pSensor->Open();
	if (FAILED(hResult)) {
		Error_check("Open()");
	}
	// No frame has been acquired yet; sizes are filled in later.
	colorWidth = colorHeight = 0;
	depthWidth = depthHeight = 0;
	pColorFrame = nullptr;
	pDepthFrame = nullptr;
}
// Initializes the Kinect v2 sensor with its coordinate mapper and body
// frame reader. Only available on Windows builds; other platforms always
// return false. Returns true when the sensor is ready.
bool KinectV2Module::initKinect()
{
#if JUCE_WINDOWS
	HRESULT hr = GetDefaultKinectSensor(&m_pKinectSensor);
	if (FAILED(hr)) {
		LOG("Kinect init failed");
		return false;
	}

	if (m_pKinectSensor) {
		// Open the sensor, then chain mapper -> body source -> body reader.
		IBodyFrameSource* bodySource = NULL;
		hr = m_pKinectSensor->Open();
		if (SUCCEEDED(hr)) hr = m_pKinectSensor->get_CoordinateMapper(&m_pCoordinateMapper);
		if (SUCCEEDED(hr)) hr = m_pKinectSensor->get_BodyFrameSource(&bodySource);
		if (SUCCEEDED(hr)) hr = bodySource->OpenReader(&m_pBodyFrameReader);
		// The source is only needed to open the reader.
		SafeRelease(bodySource);
	}

	if (!m_pKinectSensor || FAILED(hr)) {
		LOG("No ready Kinect found");
		return false;
	}

	LOG("Kinect is initialized");
	return true;
#else
	return false;
#endif
}
//コンストラクタ Kinect::Kinect() { HRESULT hr; hr = GetDefaultKinectSensor(&m_pKinectSensor); if (FAILED(hr)) exit(-1); if (m_pKinectSensor) { hr = m_pKinectSensor->Open(); if (SUCCEEDED(hr)) { kinectBody = new KinectBody(m_pKinectSensor); // kinectColor = new KinectColor(m_pKinectSensor); } } if (!m_pKinectSensor || FAILED(hr)) exit(-1); }
// Opens the default Kinect sensor, then runs the color/depth/HD-face reader
// sub-initializers and fetches the coordinate mapper.
// Returns S_OK on success, E_FAIL (after notifying listeners) otherwise.
HRESULT KinectHDFaceGrabber::initializeDefaultSensor() {
	HRESULT hr = GetDefaultKinectSensor(&m_pKinectSensor);
	if (FAILED(hr)) {
		return hr;
	}

	if (m_pKinectSensor) {
		hr = m_pKinectSensor->Open();
		// Chain the sub-initializers while everything keeps succeeding.
		if (SUCCEEDED(hr)) hr = initColorFrameReader();
		if (SUCCEEDED(hr)) hr = initDepthFrameReader();
		if (SUCCEEDED(hr)) hr = initHDFaceReader();
		if (SUCCEEDED(hr)) hr = m_pKinectSensor->get_CoordinateMapper(&m_pCoordinateMapper);
	}

	if (!m_pKinectSensor || FAILED(hr)) {
		statusChanged(L"No ready Kinect found!", true);
		return E_FAIL;
	}
	return hr;
}
// Initializes the default Kinect sensor with its coordinate mapper and body
// frame reader. Without HAVE_KINECT the function always returns false.
// Returns true when the sensor is ready.
bool KinectPlugin::initializeDefaultSensor() {
#ifdef HAVE_KINECT
	HRESULT hr = GetDefaultKinectSensor(&_kinectSensor);
	if (FAILED(hr)) {
		return false;
	}

	if (_kinectSensor) {
		// Open the sensor, then chain mapper -> body source -> body reader.
		IBodyFrameSource* bodySource = NULL;
		hr = _kinectSensor->Open();
		if (SUCCEEDED(hr)) hr = _kinectSensor->get_CoordinateMapper(&_coordinateMapper);
		if (SUCCEEDED(hr)) hr = _kinectSensor->get_BodyFrameSource(&bodySource);
		if (SUCCEEDED(hr)) hr = bodySource->OpenReader(&_bodyFrameReader);
		// The source is only needed to open the reader.
		SafeRelease(bodySource);
	}

	return _kinectSensor && SUCCEEDED(hr);
#else
	return false;
#endif
}
/// <summary> /// Initializes the default Kinect sensor /// </summary> /// <returns>indicates success or failure</returns> HRESULT CDepthBasics::InitializeDefaultSensor() { HRESULT hr; hr = GetDefaultKinectSensor(&m_pKinectSensor); if (FAILED(hr)) { return hr; } if (m_pKinectSensor) { // Initialize the Kinect and get the depth reader IDepthFrameSource* pDepthFrameSource = NULL; hr = m_pKinectSensor->Open(); if (SUCCEEDED(hr)) { hr = m_pKinectSensor->get_DepthFrameSource(&pDepthFrameSource); } if (SUCCEEDED(hr)) { hr = pDepthFrameSource->OpenReader(&m_pDepthFrameReader); } SafeRelease(pDepthFrameSource); } if (!m_pKinectSensor || FAILED(hr)) { SetStatusMessage(L"No ready Kinect found!", 10000, true); return E_FAIL; } return hr; }
// Acquires and opens the default Kinect sensor and opens the color frame
// reader. Returns S_OK on success, E_FAIL otherwise.
HRESULT KinectManager::Initialize(void) {
	HRESULT hr = GetDefaultKinectSensor(&m_pKinectSensor);
	if (FAILED(hr)) {
		return hr;
	}

	if (m_pKinectSensor) {
		// Open the sensor, then chain color source -> color reader.
		IColorFrameSource* colorSource = NULL;
		hr = m_pKinectSensor->Open();
		if (SUCCEEDED(hr)) {
			hr = m_pKinectSensor->get_ColorFrameSource(&colorSource);
		}
		if (SUCCEEDED(hr)) {
			hr = colorSource->OpenReader(&m_pColorFrameReader);
		}
		// The source is only needed to open the reader.
		SafeRelease(colorSource);
	}

	if (!m_pKinectSensor || FAILED(hr)) {
		//SetStatusMessage(L"No ready Kinect found!", 10000, true);
		return E_FAIL;
	}
	return hr;
}
/// <summary> /// Initializes the default Kinect sensor /// </summary> /// <returns>indicates success or failure</returns> HRESULT CCoordinateMappingBasics::InitializeDefaultSensor() { HRESULT hr; hr = GetDefaultKinectSensor(&m_pKinectSensor); if (FAILED(hr)) { return hr; } if (m_pKinectSensor) { // Initialize the Kinect and get coordinate mapper and the frame reader if (SUCCEEDED(hr)) { hr = m_pKinectSensor->get_CoordinateMapper(&m_pCoordinateMapper); } hr = m_pKinectSensor->Open(); if (SUCCEEDED(hr)) { hr = m_pKinectSensor->OpenMultiSourceFrameReader( FrameSourceTypes::FrameSourceTypes_Depth | FrameSourceTypes::FrameSourceTypes_Color | FrameSourceTypes::FrameSourceTypes_BodyIndex | FrameSourceTypes::FrameSourceTypes_Body, &m_pMultiSourceFrameReader); } } if (!m_pKinectSensor || FAILED(hr)) { SetStatusMessage(L"No ready Kinect found!", 10000, true); return E_FAIL; } return hr; }
// Opens the Kinect sensor, fetches the coordinate mapper, and (re)starts the
// availability-check thread. Progress and errors are reported through the
// _open/_available/_hrError Qt signals.
void KLController::open() {
	// Already open with a live check thread: nothing to do.
	if(isOpened() && checkThread && checkThread->isRunning()) {
		return;
	}
	// Restart cleanly: stop the checker and drop any previously held sensor.
	checkThread->stop();
	checkThread->wait();
	safeRelease(sensor);
	HRESULT hr = GetDefaultKinectSensor(&sensor);
	if(SUCCEEDED(hr)) {
		hr = sensor->Open();
	} else {
		emit _hrError(hr);
	}
	if(SUCCEEDED(hr)) {
		hr = sensor->get_CoordinateMapper(&coordMapper);
	} else {
		// NOTE(review): a single failure can emit _hrError more than once,
		// since every stage below re-tests the same failed hr.
		emit _hrError(hr);
	}
	if(SUCCEEDED(hr)) {
		// Hand the sensor to the watcher thread, then report open and
		// connection state to listeners.
		checkThread->setSensor(sensor);
		checkThread->start();
		qDebug()<<"[KLController] Opened";
		emit _open(true);
		if(isAvailable()) {
			qDebug()<<"[KLController] Connected";
			emit _available(true);
		} else {
			qDebug()<<"[KLController] Disconnected";
			emit _available(false);
		}
	} else {
		emit _hrError(hr);
	}
}
// Demo entry point: streams Kinect v2 color + body frames, overlays
// hand-state circles and joint dots on the color image with OpenCV, and
// shows the half-size result in a window until ESC is pressed.
int _tmain( int argc, _TCHAR* argv[] ) {
	cv::setUseOptimized( true );

	// Sensor
	IKinectSensor* pSensor;
	HRESULT hResult = S_OK;
	hResult = GetDefaultKinectSensor( &pSensor );
	if( FAILED( hResult ) ){
		std::cerr << "Error : GetDefaultKinectSensor" << std::endl;
		return -1;
	}
	hResult = pSensor->Open( );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IKinectSensor::Open()" << std::endl;
		return -1;
	}

	// Source
	IColorFrameSource* pColorSource;
	hResult = pSensor->get_ColorFrameSource( &pColorSource );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IKinectSensor::get_ColorFrameSource()" << std::endl;
		return -1;
	}
	IBodyFrameSource* pBodySource;
	hResult = pSensor->get_BodyFrameSource( &pBodySource );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IKinectSensor::get_BodyFrameSource()" << std::endl;
		return -1;
	}

	// Reader
	IColorFrameReader* pColorReader;
	hResult = pColorSource->OpenReader( &pColorReader );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IColorFrameSource::OpenReader()" << std::endl;
		return -1;
	}
	IBodyFrameReader* pBodyReader;
	hResult = pBodySource->OpenReader( &pBodyReader );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IBodyFrameSource::OpenReader()" << std::endl;
		return -1;
	}

	// Description
	IFrameDescription* pDescription;
	hResult = pColorSource->get_FrameDescription( &pDescription );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IColorFrameSource::get_FrameDescription()" << std::endl;
		return -1;
	}
	int width = 0;
	int height = 0;
	pDescription->get_Width( &width ); // 1920
	pDescription->get_Height( &height ); // 1080
	// BGRA buffer for the full-resolution frame plus a half-size display mat.
	unsigned int bufferSize = width * height * 4 * sizeof( unsigned char );
	cv::Mat bufferMat( height, width, CV_8UC4 );
	cv::Mat bodyMat( height / 2, width / 2, CV_8UC4 );
	cv::namedWindow( "Body" );

	// Color Table -- one draw color per trackable body slot
	cv::Vec3b color[BODY_COUNT];
	color[0] = cv::Vec3b( 255, 0, 0 );
	color[1] = cv::Vec3b( 0, 255, 0 );
	color[2] = cv::Vec3b( 0, 0, 255 );
	color[3] = cv::Vec3b( 255, 255, 0 );
	color[4] = cv::Vec3b( 255, 0, 255 );
	color[5] = cv::Vec3b( 0, 255, 255 );

	// Coordinate Mapper
	ICoordinateMapper* pCoordinateMapper;
	hResult = pSensor->get_CoordinateMapper( &pCoordinateMapper );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IKinectSensor::get_CoordinateMapper()" << std::endl;
		return -1;
	}

	while( 1 ){
		// Color frame: copy the latest frame into bufferMat as BGRA.
		IColorFrame* pColorFrame = nullptr;
		hResult = pColorReader->AcquireLatestFrame( &pColorFrame );
		if( SUCCEEDED( hResult ) ){
			hResult = pColorFrame->CopyConvertedFrameDataToArray( bufferSize, reinterpret_cast<BYTE*>( bufferMat.data ), ColorImageFormat::ColorImageFormat_Bgra );
			if( SUCCEEDED( hResult ) ){
				cv::resize( bufferMat, bodyMat, cv::Size(), 0.5, 0.5 );
			}
		}
		//SafeRelease( pColorFrame );

		// Body frame: overlay hand states and joints for each tracked body.
		IBodyFrame* pBodyFrame = nullptr;
		hResult = pBodyReader->AcquireLatestFrame( &pBodyFrame );
		if( SUCCEEDED( hResult ) ){
			IBody* pBody[BODY_COUNT] = { 0 };
			hResult = pBodyFrame->GetAndRefreshBodyData( BODY_COUNT, pBody );
			if( SUCCEEDED( hResult ) ){
				for( int count = 0; count < BODY_COUNT; count++ ){
					BOOLEAN bTracked = false;
					hResult = pBody[count]->get_IsTracked( &bTracked );
					if( SUCCEEDED( hResult ) && bTracked ){
						Joint joint[JointType::JointType_Count];
						hResult = pBody[ count ]->GetJoints( JointType::JointType_Count, joint );
						if( SUCCEEDED( hResult ) ){
							// Left Hand State -- green = open, red = closed,
							// teal = lasso (circle drawn at the mapped pixel).
							HandState leftHandState = HandState::HandState_Unknown;
							hResult = pBody[count]->get_HandLeftState( &leftHandState );
							if( SUCCEEDED( hResult ) ){
								ColorSpacePoint colorSpacePoint = { 0 };
								hResult = pCoordinateMapper->MapCameraPointToColorSpace( joint[JointType::JointType_HandLeft].Position, &colorSpacePoint );
								if( SUCCEEDED( hResult ) ){
									int x = static_cast<int>( colorSpacePoint.X );
									int y = static_cast<int>( colorSpacePoint.Y );
									if( ( x >= 0 ) && ( x < width ) && ( y >= 0 ) && ( y < height ) ){
										if( leftHandState == HandState::HandState_Open ){
											cv::circle( bufferMat, cv::Point( x, y ), 75, cv::Scalar( 0, 128, 0 ), 5, CV_AA );
										}
										else if( leftHandState == HandState::HandState_Closed ){
											cv::circle( bufferMat, cv::Point( x, y ), 75, cv::Scalar( 0, 0, 128 ), 5, CV_AA );
										}
										else if( leftHandState == HandState::HandState_Lasso ){
											cv::circle( bufferMat, cv::Point( x, y ), 75, cv::Scalar( 128, 128, 0 ), 5, CV_AA );
										}
									}
								}
							}
							// Right Hand State -- same color coding as above.
							HandState rightHandState = HandState::HandState_Unknown;
							hResult = pBody[count]->get_HandRightState( &rightHandState );
							if( SUCCEEDED( hResult ) ){
								ColorSpacePoint colorSpacePoint = { 0 };
								hResult = pCoordinateMapper->MapCameraPointToColorSpace( joint[JointType::JointType_HandRight].Position, &colorSpacePoint );
								if( SUCCEEDED( hResult ) ){
									int x = static_cast<int>( colorSpacePoint.X );
									int y = static_cast<int>( colorSpacePoint.Y );
									if( ( x >= 0 ) && ( x < width ) && ( y >= 0 ) && ( y < height ) ){
										if( rightHandState == HandState::HandState_Open ){
											cv::circle( bufferMat, cv::Point( x, y ), 75, cv::Scalar( 0, 128, 0 ), 5, CV_AA );
										}
										else if( rightHandState == HandState::HandState_Closed ){
											cv::circle( bufferMat, cv::Point( x, y ), 75, cv::Scalar( 0, 0, 128 ), 5, CV_AA );
										}
										else if( rightHandState == HandState::HandState_Lasso ){
											cv::circle( bufferMat, cv::Point( x, y ), 75, cv::Scalar( 128, 128, 0 ), 5, CV_AA );
										}
									}
								}
							}
							// Joint -- draw every joint as a filled dot in
							// this body's table color.
							for( int type = 0; type < JointType::JointType_Count; type++ ){
								ColorSpacePoint colorSpacePoint = { 0 };
								pCoordinateMapper->MapCameraPointToColorSpace( joint[type].Position, &colorSpacePoint );
								int x = static_cast<int>( colorSpacePoint.X );
								int y = static_cast<int>( colorSpacePoint.Y );
								if( ( x >= 0 ) && ( x < width ) && ( y >= 0 ) && ( y < height ) ){
									cv::circle( bufferMat, cv::Point( x, y ), 5, static_cast< cv::Scalar >( color[count] ), -1, CV_AA );
								}
							}
						}
						/*// Activity
						UINT capacity = 0;
						DetectionResult detectionResults = DetectionResult::DetectionResult_Unknown;
						hResult = pBody[count]->GetActivityDetectionResults( capacity, &detectionResults );
						if( SUCCEEDED( hResult ) ){
							if( detectionResults == DetectionResult::DetectionResult_Yes ){
								switch( capacity ){
									case Activity::Activity_EyeLeftClosed: std::cout << "Activity_EyeLeftClosed" << std::endl; break;
									case Activity::Activity_EyeRightClosed: std::cout << "Activity_EyeRightClosed" << std::endl; break;
									case Activity::Activity_MouthOpen: std::cout << "Activity_MouthOpen" << std::endl; break;
									case Activity::Activity_MouthMoved: std::cout << "Activity_MouthMoved" << std::endl; break;
									case Activity::Activity_LookingAway: std::cout << "Activity_LookingAway" << std::endl; break;
									default: break;
								}
							}
						}
						else{
							std::cerr << "Error : IBody::GetActivityDetectionResults()" << std::endl;
						}*/
						/*// Appearance
						capacity = 0;
						detectionResults = DetectionResult::DetectionResult_Unknown;
						hResult = pBody[count]->GetAppearanceDetectionResults( capacity, &detectionResults );
						if( SUCCEEDED( hResult ) ){
							if( detectionResults == DetectionResult::DetectionResult_Yes ){
								switch( capacity ){
									case Appearance::Appearance_WearingGlasses: std::cout << "Appearance_WearingGlasses" << std::endl; break;
									default: break;
								}
							}
						}
						else{
							std::cerr << "Error : IBody::GetAppearanceDetectionResults()" << std::endl;
						}*/
						/*// Expression
						capacity = 0;
						detectionResults = DetectionResult::DetectionResult_Unknown;
						hResult = pBody[count]->GetExpressionDetectionResults( capacity, &detectionResults );
						if( SUCCEEDED( hResult ) ){
							if( detectionResults == DetectionResult::DetectionResult_Yes ){
								switch( capacity ){
									case Expression::Expression_Happy: std::cout << "Expression_Happy" << std::endl; break;
									case Expression::Expression_Neutral: std::cout << "Expression_Neutral" << std::endl; break;
									default: break;
								}
							}
						}
						else{
							std::cerr << "Error : IBody::GetExpressionDetectionResults()" << std::endl;
						}*/
						// Lean -- print the body's lean amount (X/Y in [-1, 1]).
						PointF amount;
						hResult = pBody[count]->get_Lean( &amount );
						if( SUCCEEDED( hResult ) ){
							std::cout << "amount : " << amount.X << ", " << amount.Y << std::endl;
						}
					}
				}
				cv::resize( bufferMat, bodyMat, cv::Size(), 0.5, 0.5 );
			}
			for( int count = 0; count < BODY_COUNT; count++ ){
				SafeRelease( pBody[count] );
			}
		}
		//SafeRelease( pBodyFrame );
		SafeRelease( pColorFrame );
		SafeRelease( pBodyFrame );

		cv::imshow( "Body", bodyMat );

		if( cv::waitKey( 10 ) == VK_ESCAPE ){
			break;
		}
	}

	// Release all COM objects and close the sensor before exiting.
	SafeRelease( pColorSource );
	SafeRelease( pBodySource );
	SafeRelease( pColorReader );
	SafeRelease( pBodyReader );
	SafeRelease( pDescription );
	SafeRelease( pCoordinateMapper );
	if( pSensor ){
		pSensor->Close();
	}
	SafeRelease( pSensor );
	cv::destroyAllWindows();

	return 0;
}
// Kinect v2 + Kinect Fusion sample entry point.
//
// Per iteration of the main loop:
//   1. Acquire the latest depth frame from the sensor.
//   2. Convert raw 16-bit depth to a Fusion float frame, then smooth it.
//   3. Align and integrate the frame into the volumetric reconstruction.
//   4. Ray-cast the volume to a point cloud and shade it into surface and
//      normal images, which are displayed with OpenCV alongside the raw depth.
//
// Returns 0 on normal exit (ESC pressed), -1 on any initialization or
// processing error.
//
// NOTE(review): every early `return -1` path leaks the COM interfaces and
// Fusion image frames acquired up to that point (no SafeRelease before
// returning). Tolerable for a sample that exits immediately; production code
// should funnel through a single cleanup path (RAII wrappers or goto-cleanup).
int _tmain( int argc, _TCHAR* argv[] )
{
	cv::setUseOptimized( true );

	// Sensor
	IKinectSensor* pSensor;
	HRESULT hResult = S_OK;
	hResult = GetDefaultKinectSensor( &pSensor );
	if( FAILED( hResult ) ){
		std::cerr << "Error : GetDefaultKinectSensor" << std::endl;
		return -1;
	}

	hResult = pSensor->Open();
	if( FAILED( hResult ) ){
		std::cerr << "Error : IKinectSensor::Open()" << std::endl;
		return -1;
	}

	// Source
	IDepthFrameSource* pDepthSource;
	hResult = pSensor->get_DepthFrameSource( &pDepthSource );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IKinectSensor::get_DepthFrameSource()" << std::endl;
		return -1;
	}

	// Reader
	IDepthFrameReader* pDepthReader;
	hResult = pDepthSource->OpenReader( &pDepthReader );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IDepthFrameSource::OpenReader()" << std::endl;
		return -1;
	}

	// Description (frame dimensions of the depth stream)
	IFrameDescription* pDescription;
	hResult = pDepthSource->get_FrameDescription( &pDescription );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IDepthFrameSource::get_FrameDescription()" << std::endl;
		return -1;
	}
	int width = 0;
	int height = 0;
	pDescription->get_Width( &width ); // 512
	pDescription->get_Height( &height ); // 424
	// Size in bytes of one raw depth frame (16 bits per pixel). Also reused
	// below as the out-parameter of AccessUnderlyingBuffer, which overwrites
	// it with the frame's actual capacity each iteration.
	unsigned int bufferSize = width * height * sizeof( unsigned short );

	// bufferMat receives the raw 16-bit depth; depthMat is its 8-bit
	// visualization for imshow.
	cv::Mat bufferMat( height, width, CV_16UC1 );
	cv::Mat depthMat( height, width, CV_8UC1 );
	cv::namedWindow( "Depth" );

	// Coordinate Mapper (obtained but not used in the loop below)
	ICoordinateMapper* pCoordinateMapper;
	hResult = pSensor->get_CoordinateMapper( &pCoordinateMapper );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IKinectSensor::get_CoordinateMapper()" << std::endl;
		return -1;
	}

	// Create Reconstruction
	// Volume resolution: 256 voxels per meter with 512x384x512 voxels,
	// i.e. a physical volume of 2.0m x 1.5m x 2.0m.
	NUI_FUSION_RECONSTRUCTION_PARAMETERS reconstructionParameter;
	reconstructionParameter.voxelsPerMeter = 256;
	reconstructionParameter.voxelCountX = 512;
	reconstructionParameter.voxelCountY = 384;
	reconstructionParameter.voxelCountZ = 512;

	// Camera pose starts at identity; updated by ProcessFrame each iteration.
	Matrix4 worldToCameraTransform;
	SetIdentityMatrix( worldToCameraTransform );
	INuiFusionReconstruction* pReconstruction;
	// PROCESSOR_TYPE_AMP selects GPU (C++ AMP) processing; the -1 device
	// index presumably selects the default adapter — TODO confirm against the
	// NuiFusionCreateReconstruction documentation.
	hResult = NuiFusionCreateReconstruction( &reconstructionParameter, NUI_FUSION_RECONSTRUCTION_PROCESSOR_TYPE_AMP, -1, &worldToCameraTransform, &pReconstruction );
	if( FAILED( hResult ) ){
		std::cerr << "Error : NuiFusionCreateReconstruction()" << std::endl;
		return -1;
	}

	// Create Image Frame
	// Depth (raw depth converted to meters, float per pixel)
	NUI_FUSION_IMAGE_FRAME* pDepthFloatImageFrame;
	hResult = NuiFusionCreateImageFrame( NUI_FUSION_IMAGE_TYPE_FLOAT, width, height, nullptr, &pDepthFloatImageFrame );
	if( FAILED( hResult ) ){
		std::cerr << "Error : NuiFusionCreateImageFrame( FLOAT )" << std::endl;
		return -1;
	}

	// SmoothDepth (depth after noise smoothing)
	NUI_FUSION_IMAGE_FRAME* pSmoothDepthFloatImageFrame;
	hResult = NuiFusionCreateImageFrame( NUI_FUSION_IMAGE_TYPE_FLOAT, width, height, nullptr, &pSmoothDepthFloatImageFrame );
	if( FAILED( hResult ) ){
		std::cerr << "Error : NuiFusionCreateImageFrame( FLOAT )" << std::endl;
		return -1;
	}

	// Point Cloud (ray-cast result from the volume)
	NUI_FUSION_IMAGE_FRAME* pPointCloudImageFrame;
	hResult = NuiFusionCreateImageFrame( NUI_FUSION_IMAGE_TYPE_POINT_CLOUD, width, height, nullptr, &pPointCloudImageFrame );
	if( FAILED( hResult ) ){
		std::cerr << "Error : NuiFusionCreateImageFrame( POINT_CLOUD )" << std::endl;
		return -1;
	}

	// Surface (shaded surface color image)
	NUI_FUSION_IMAGE_FRAME* pSurfaceImageFrame;
	hResult = NuiFusionCreateImageFrame( NUI_FUSION_IMAGE_TYPE_COLOR, width, height, nullptr, &pSurfaceImageFrame );
	if( FAILED( hResult ) ){
		std::cerr << "Error : NuiFusionCreateImageFrame( COLOR )" << std::endl;
		return -1;
	}

	// Normal (surface-normal visualization image)
	NUI_FUSION_IMAGE_FRAME* pNormalImageFrame;
	hResult = NuiFusionCreateImageFrame( NUI_FUSION_IMAGE_TYPE_COLOR, width, height, nullptr, &pNormalImageFrame );
	if( FAILED( hResult ) ){
		std::cerr << "Error : NuiFusionCreateImageFrame( COLOR )" << std::endl;
		return -1;
	}

	cv::namedWindow( "Surface" );
	cv::namedWindow( "Normal" );

	while( 1 ){
		// Frame
		IDepthFrame* pDepthFrame = nullptr;
		hResult = pDepthReader->AcquireLatestFrame( &pDepthFrame );
		if( SUCCEEDED( hResult ) ){
			// Zero-copy: points bufferMat.data at the frame's internal buffer.
			// That pointer is only valid until SafeRelease( pDepthFrame )
			// below; DepthToDepthFloatFrame copies the data out before then.
			hResult = pDepthFrame->AccessUnderlyingBuffer( &bufferSize, reinterpret_cast<UINT16**>( &bufferMat.data ) );
			if( SUCCEEDED( hResult ) ){
				// Linear map for display: 0mm -> 255 (bright), 8000mm -> 0.
				bufferMat.convertTo( depthMat, CV_8U, -255.0f / 8000.0f, 255.0f );
				hResult = pReconstruction->DepthToDepthFloatFrame( reinterpret_cast<UINT16*>( bufferMat.data ), width * height * sizeof( UINT16 ), pDepthFloatImageFrame, NUI_FUSION_DEFAULT_MINIMUM_DEPTH/* 0.5[m] */, NUI_FUSION_DEFAULT_MAXIMUM_DEPTH/* 8.0[m] */, true );
				if( FAILED( hResult ) ){
					std::cerr << "Error :INuiFusionReconstruction::DepthToDepthFloatFrame()" << std::endl;
					return -1;
				}
			}
		}
		SafeRelease( pDepthFrame );
		// NOTE(review): when AcquireLatestFrame fails (e.g. no new frame yet),
		// the pipeline below still runs on the previous (or initially empty)
		// depth float frame.

		// Smoothing Depth Image Frame
		hResult = pReconstruction->SmoothDepthFloatFrame( pDepthFloatImageFrame, pSmoothDepthFloatImageFrame, 1, 0.04f );
		if( FAILED( hResult ) ){
			std::cerr << "Error :INuiFusionReconstruction::SmoothDepthFloatFrame" << std::endl;
			return -1;
		}

		// Reconstruction Process: align the new frame against the volume and
		// integrate it, updating the camera pose estimate.
		pReconstruction->GetCurrentWorldToCameraTransform( &worldToCameraTransform );
		hResult = pReconstruction->ProcessFrame( pSmoothDepthFloatImageFrame, NUI_FUSION_DEFAULT_ALIGN_ITERATION_COUNT, NUI_FUSION_DEFAULT_INTEGRATION_WEIGHT, nullptr, &worldToCameraTransform );
		if( FAILED( hResult ) ){
			// Tracking failures are tolerated; after 100 cumulative (not
			// consecutive — the counter is never cleared on success) failures,
			// the volume is reset and tracking restarts from scratch.
			static int errorCount = 0;
			errorCount++;
			if( errorCount >= 100 ) {
				errorCount = 0;
				ResetReconstruction( pReconstruction, &worldToCameraTransform );
			}
		}

		// Calculate Point Cloud from the current camera pose.
		hResult = pReconstruction->CalculatePointCloud( pPointCloudImageFrame, &worldToCameraTransform );
		if( FAILED( hResult ) ){
			std::cerr << "Error : CalculatePointCloud" << std::endl;
			return -1;
		}

		// Shading Point Cloud
		// worldToBGRTransform maps world coordinates into the unit color cube
		// (scale by volume extent, then offset X/Y by 0.5) so position
		// determines the surface color.
		Matrix4 worldToBGRTransform = { 0.0f };
		worldToBGRTransform.M11 = reconstructionParameter.voxelsPerMeter / reconstructionParameter.voxelCountX;
		worldToBGRTransform.M22 = reconstructionParameter.voxelsPerMeter / reconstructionParameter.voxelCountY;
		worldToBGRTransform.M33 = reconstructionParameter.voxelsPerMeter / reconstructionParameter.voxelCountZ;
		worldToBGRTransform.M41 = 0.5f;
		worldToBGRTransform.M42 = 0.5f;
		worldToBGRTransform.M43 = 0.0f;
		worldToBGRTransform.M44 = 1.0f;

		hResult = NuiFusionShadePointCloud( pPointCloudImageFrame, &worldToCameraTransform, &worldToBGRTransform, pSurfaceImageFrame, pNormalImageFrame );
		if( FAILED( hResult ) ){
			std::cerr << "Error : NuiFusionShadePointCloud" << std::endl;
			return -1;
		}

		// Wrap the Fusion frame buffers as OpenCV headers (no copy) for display.
		cv::Mat surfaceMat( height, width, CV_8UC4, pSurfaceImageFrame->pFrameBuffer->pBits );
		cv::Mat normalMat( height, width, CV_8UC4, pNormalImageFrame->pFrameBuffer->pBits );

		cv::imshow( "Depth", depthMat );
		cv::imshow( "Surface", surfaceMat );
		cv::imshow( "Normal", normalMat );

		// ESC quits; 'r' manually resets the reconstruction volume.
		int key = cv::waitKey( 30 );
		if( key == VK_ESCAPE ){
			break;
		}
		else if( key == 'r' ){
			ResetReconstruction( pReconstruction, &worldToCameraTransform );
		}
	}

	// Cleanup (normal-exit path only; see leak note at the top).
	SafeRelease( pDepthSource );
	SafeRelease( pDepthReader );
	SafeRelease( pDescription );
	SafeRelease( pCoordinateMapper );
	SafeRelease( pReconstruction );
	NuiFusionReleaseImageFrame( pDepthFloatImageFrame );
	NuiFusionReleaseImageFrame( pSmoothDepthFloatImageFrame );
	NuiFusionReleaseImageFrame( pPointCloudImageFrame );
	NuiFusionReleaseImageFrame( pSurfaceImageFrame );
	NuiFusionReleaseImageFrame( pNormalImageFrame );
	if( pSensor ){
		pSensor->Close();
	}
	SafeRelease( pSensor );
	cv::destroyAllWindows();

	return 0;
}