void Kinect::KinectWindow::pausewindows() { bool start=false; //check button name, based on that stop pause and start if ( mKinectPause->text().toStdString().compare( tr( "Start" ).toStdString() )==0 ) { //inicialize openni and start device // start = thr->inicializeKinect(); if ( start ) { emit startKinect(); emit sendImageKinect( true ); mKinectPause->setText( tr( "Pause" ) ); } } else if ( mKinectPause->text().toStdString().compare( tr( "Pause" ).toStdString() )==0 ) { mKinectPause->setText( tr( "Continue" ) ); emit sendImageKinect( false ); //this->thr->closeActionOpenni(); } else { //inicialize openni and start device //start = thr->inicializeKinect(); if ( start ) { mKinectPause->setText( tr( "Pause" ) ); emit sendImageKinect( true ); } } }
// Grabber thread main loop: allocates the frame buffers, starts the Kinect,
// then repeatedly pumps libfreenect events and publishes the latest frame
// into m_rgbd_image under the write lock until the thread is asked to exit.
void FreenectGrabber :: run()
{
	setThreadShouldExit(false);

	// Both images share the same calibration data.
	m_current_image.setCalibration(m_calib_data);
	m_rgbd_image.setCalibration(m_calib_data);

	// Pre-allocate fixed-size buffers (FREENECT_FRAME_H x FREENECT_FRAME_W)
	// for the published image and the in-progress capture image.
	m_rgbd_image.rawRgbRef() = Mat3b(FREENECT_FRAME_H, FREENECT_FRAME_W);
	m_rgbd_image.rawDepthRef() = Mat1f(FREENECT_FRAME_H, FREENECT_FRAME_W);
	m_rgbd_image.rawIntensityRef() = Mat1f(FREENECT_FRAME_H, FREENECT_FRAME_W);

	m_current_image.rawRgbRef() = Mat3b(FREENECT_FRAME_H, FREENECT_FRAME_W);
	m_current_image.rawDepthRef() = Mat1f(FREENECT_FRAME_H, FREENECT_FRAME_W);
	m_current_image.rawIntensityRef() = Mat1f(FREENECT_FRAME_H, FREENECT_FRAME_W);

	startKinect();
	int64 last_grab_time = 0;

	while (!threadShouldExit())
	{
		waitForNewEvent(-1); // Use infinite timeout in order to honor sync mode.

		// Pump libfreenect until both the depth and RGB callbacks have
		// delivered fresh data (the callbacks clear these flags).
		while (m_depth_transmitted || m_rgb_transmitted)
			freenect_process_events(f_ctx);

		// m_current_image.rawDepth().copyTo(m_current_image.rawAmplitudeRef());
		// m_current_image.rawDepth().copyTo(m_current_image.rawIntensityRef());

		{
			// Log the inter-frame interval at debug level 2.
			int64 grab_time = ntk::Time::getMillisecondCounter();
			ntk_dbg_print(grab_time - last_grab_time, 2);
			last_grab_time = grab_time;

			QWriteLocker locker(&m_lock);
			// FIXME: ugly hack to handle the possible time
			// gaps between rgb and IR frames in dual mode.
			if (m_dual_ir_rgb)
				m_current_image.copyTo(m_rgbd_image);   // keep capture buffer intact
			else
				m_current_image.swap(m_rgbd_image);     // cheap pointer swap

			// Mark both channels as consumed so the inner pump loop above
			// waits for a complete new pair next iteration.
			m_rgb_transmitted = true;
			m_depth_transmitted = true;
		}

		// In dual mode, alternate between IR and RGB capture every frame.
		if (m_dual_ir_rgb)
			setIRMode(!m_ir_mode);
		advertiseNewFrame();
#ifdef _WIN32
		// FIXME: this is to avoid GUI freezes with libfreenect on Windows.
		// See http://groups.google.com/group/openkinect/t/b1d828d108e9e69
		Sleep(1);
#endif
	}
}
// Slot for the checkable Kinect button: starts the Kinect thread and image
// streaming when checked, pauses streaming when unchecked.
void QOpenCV::OpenCVWindow::onKinectStartCancel( bool checked )
{
	if ( !checked ) {
		// Pause: stop frame delivery and relabel the button for restart.
		qDebug() << "Pausujem Kinect Thread";
		mKinectPB->setText( tr( "Start Kinect" ) );
		emit sendImageKinect( false );
		return;
	}

	// Start: launch the worker thread and begin streaming frames.
	qDebug() << "Startujem Kinect Thread";
	mKinectPB->setText( tr( "Pause Kinect" ) );
	emit startKinect();
	emit sendImageKinect( true );
}
// Initializes the first attached Kinect sensor.
// Returns the final NUI HRESULT; sets kinectInitialized on success and
// kinectFailed on any terminal failure (still-initializing is not terminal).
HRESULT KinectManager::initialize()
{
	// reset vars //
	kinectInitialized = false;
	kinectFailed = false;

	// create vars //
	int numSensors;

	//get number of sensors
	HRESULT hr = NuiGetSensorCount(&numSensors);

	if (SUCCEEDED(hr))
	{
		if (numSensors == 0)
		{
			//NO KINECTS CONNECTED
			hr = MAKE_HRESULT(0, 0, -1);
			kinectFailed = true;
			cout << "No Kinect sensors found\n";
		}
		else
		{
			cout << "Found sensor...\n";
			// Get the first kinect and its status.
			// FIX: check the creation result — the original discarded it and
			// dereferenced pKinect unconditionally, crashing when sensor
			// creation fails and pKinect stays null.
			hr = NuiCreateSensorByIndex(0, &pKinect);
			if (SUCCEEDED(hr))
			{
				hr = pKinect->NuiStatus();
			}
			// On failure, hr falls through to the terminal-failure branch below.
		}
	}

	if (SUCCEEDED(hr))
	{
		//kinect ready
		cout << "Kinect is ready, starting now\n";
		kinectInitialized = true;
		startKinect();
	}
	else if (hr != S_NUI_INITIALIZING)
	{
		kinectFailed = true;
		cout << "Kinect failed and is not initializing\n";
	}

	return hr;
}
// Wires the Kinect worker thread (mThrKinect) to the OpenCV window
// (mOpencvWindow) and the Aruco thread (mThrAruco): frame delivery,
// start/stop control, gesture-driven camera manipulation, and the
// marker-detection hand-off. Connection order is preserved as-is.
void OpenCV::OpenCVCore::createConnectionKinect()
{
	//for video sending
	QObject::connect( mOpencvWindow,
					  SIGNAL( sendImageKinect( bool ) ),
					  mThrKinect,
					  SLOT( setImageSend( bool ) ) );

	//send image to label
	QObject::connect( mThrKinect,
					  SIGNAL( pushImage( cv::Mat ) ),
					  mOpencvWindow,
					  SLOT( setLabel( cv::Mat ) ) );

	//start
	QObject::connect( mOpencvWindow,
					  SIGNAL( startKinect() ),
					  mThrKinect,
					  SLOT( start() ) );

	//stop
	QObject::connect( mOpencvWindow,
					  SIGNAL( stopKinect( bool ) ),
					  mThrKinect,
					  SLOT( setCancel( bool ) ) );

	// moving camera with gesture
	QObject::connect( mThrKinect,
					  SIGNAL( sendSliderCoords( float,float,float ) ),
					  AppCore::Core::getInstance( NULL )->getCoreWindow()->getCameraManipulator(),
					  SLOT( setRotationHeadKinect( float,float,float ) ) );

	//enable/disable cursor movement
	QObject::connect( mOpencvWindow,
					  SIGNAL( setMovementCursor( bool ) ),
					  mThrKinect,
					  SLOT( setCursorMovement( bool ) ) );

	//enable/disable zoom
	QObject::connect( mOpencvWindow,
					  SIGNAL( setZoom( bool ) ),
					  mThrKinect,
					  SLOT( setZoomUpdate( bool ) ) );

	//edit for speed movement
	QObject::connect( mOpencvWindow,
					  SIGNAL( sendSpeedKinect( double ) ),
					  mThrKinect,
					  SLOT( setSpeedKinect( double ) ) );

	//initialize the OpenNI device in the worker thread
	QObject::connect( mOpencvWindow,
					  SIGNAL( inicializeKinect() ),
					  mThrKinect,
					  SLOT( inicializeKinect() ) );

	//shut down the OpenNI device in the worker thread
	QObject::connect( mOpencvWindow,
					  SIGNAL( closeActionOpenni() ),
					  mThrKinect,
					  SLOT( closeActionOpenni() ) );

	//enable/disable sending picture to Marker Detection
	QObject::connect( mOpencvWindow,
					  SIGNAL( setKinectMarkerDetection( bool ) ),
					  mThrKinect,
					  SLOT( setImageSendToMarkerDetection( bool ) ) );

	//forward Kinect frames to the Aruco thread for marker detection
	QObject::connect( mThrKinect,
					  SIGNAL( pushImageToMarkerDetection( cv::Mat ) ),
					  mThrAruco,
					  SLOT( detectMarkerFromImage( cv::Mat ) ) );

	//send augmented Image created in Kinect
	QObject::connect( mThrAruco,
					  SIGNAL( pushImageFromKinect( cv::Mat ) ),
					  mOpencvWindow,
					  SLOT( setLabel( cv::Mat ) ) );

	//enable/disable capturing a still image in the Kinect thread
	QObject::connect( mOpencvWindow,
					  SIGNAL( setKinectCaptureImage( bool ) ),
					  mThrKinect,
					  SLOT( setCaptureImage( bool ) ) );
}
// Per-frame update: ages hand particles, lazily finishes Kinect
// initialization, then reads the next skeleton frame and pushes both hand
// positions into the particle system.
// Returns the NUI HRESULT of the frame grab, a custom failure HRESULT when
// the manager is unusable, or a custom S_* code when no new frame exists.
HRESULT KinectManager::update(float timeScale)
{
	// FIX: the original wrapped pHandPositions->size() in try/catch, but
	// dereferencing a null/invalid pointer is undefined behavior, not a C++
	// exception — the handler could never fire (and it caught by value).
	// An explicit null check expresses the intended guard.
	if (pHandPositions == nullptr)
	{
		cout << "hand particle list is not available" << endl;
		return MAKE_HRESULT(0, 0, -1);
	}

	// Age particles and erase the ones past the timeout; erase() returns the
	// next valid iterator, so only advance manually on the keep path.
	for (vector<Particle*>::iterator hp = pHandPositions->begin(); hp != pHandPositions->end();)
	{
		(*hp)->timer += 16.f / timeScale;
		if ((*hp)->timer > handParticleTimeout)
			hp = pHandPositions->erase(hp);
		else
			++hp;
	}

	if (kinectFailed)
		return MAKE_HRESULT(0, 0, -1);

	HRESULT hr;
	if (!kinectInitialized)
	{
		// Sensor was still initializing when initialize() ran; poll it here.
		hr = pKinect->NuiStatus();
		if (hr == S_NUI_INITIALIZING)
		{
			cout << "kinect is still initializing...\n";
			return hr;
		}
		else if (FAILED(hr))
		{
			kinectFailed = true;
			return hr;
		}
		//kinect ready, initialize
		kinectInitialized = true;
		startKinect();
	}

	// kinect is okay, do updating //

	//get next frame (5 ms wait)
	hr = pKinect->NuiSkeletonGetNextFrame(5, pSkeletonFrame);

	if (SUCCEEDED(hr))
	{
		//pHandPos->clear();
		ofVec3f pos;
		for (int i = 0; i < NUI_SKELETON_COUNT; i++)
		{
			//if its a tracked skeleton
			if (pSkeletonFrame->SkeletonData[i].eTrackingState == NUI_SKELETON_TRACKED)
			{
				//left hand
				pos = handPositionToScreenPosition(
					pSkeletonFrame->SkeletonData[i].SkeletonPositions[NUI_SKELETON_POSITION_HAND_LEFT]);
				addUpdate(pos, pSkeletonFrame->SkeletonData[i].dwTrackingID, NUI_SKELETON_POSITION_HAND_LEFT);

				//right hand
				pos = handPositionToScreenPosition(
					pSkeletonFrame->SkeletonData[i].SkeletonPositions[NUI_SKELETON_POSITION_HAND_RIGHT]);
				addUpdate(pos, pSkeletonFrame->SkeletonData[i].dwTrackingID, NUI_SKELETON_POSITION_HAND_RIGHT);
			}
		}
	}
	else if (hr == E_NUI_FRAME_NO_DATA || hr == S_FALSE)
	{
		//new frame was not yet available
		//if (hr == E_NUI_FRAME_NO_DATA)
		//cout << "no Kinect Data" << ofGetFrameNum() << endl;
		return MAKE_HRESULT(0, 0, 1);
	}

	//NUI_IMAGE_FRAME nextFrame;
	//hr = pKinect->NuiImageStreamGetNextFrame(colorStream, 20, &nextFrame);

	return hr;
}