Example 1
// Drain every stream that has a pending frame; with a timeout of 0,
// waitForAnyStream returns immediately once no stream is ready.
void readFrame()
{
	openni::Status rc = openni::STATUS_OK;

	openni::VideoStream* streams[] = {&g_depthStream, &g_colorStream, &g_irStream};

	int changedIndex = -1;
	while (rc == openni::STATUS_OK)
	{
		rc = openni::OpenNI::waitForAnyStream(streams, 3, &changedIndex, 0);
		if (rc == openni::STATUS_OK)
		{
			switch (changedIndex)
			{
			case 0:
				g_depthStream.readFrame(&g_depthFrame); break;
			case 1:
				g_colorStream.readFrame(&g_colorFrame); break;
			case 2:
				g_irStream.readFrame(&g_irFrame); break;
			default:
				printf("Error in wait\n");
			}
		}
	}
}
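The readFrame() above assumes three global streams and frame refs that were created and started elsewhere. A minimal setup sketch under that assumption (error handling abbreviated; the names mirror the globals used above):

openni::Device g_device;
openni::VideoStream g_depthStream, g_colorStream, g_irStream;
openni::VideoFrameRef g_depthFrame, g_colorFrame, g_irFrame;

bool initStreams()
{
	// Initialize OpenNI and open the first available device.
	if (openni::OpenNI::initialize() != openni::STATUS_OK) return false;
	if (g_device.open(openni::ANY_DEVICE) != openni::STATUS_OK) return false;

	// Create and start one stream per sensor; production code should check create() too.
	g_depthStream.create(g_device, openni::SENSOR_DEPTH);
	g_colorStream.create(g_device, openni::SENSOR_COLOR);
	g_irStream.create(g_device, openni::SENSOR_IR);
	return g_depthStream.start() == openni::STATUS_OK
	    && g_colorStream.start() == openni::STATUS_OK
	    && g_irStream.start() == openni::STATUS_OK;
}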
Example 2
void KinectCamera::closecamera(void)
{
    mColorStream.destroy();
    mDepthStream.destroy();
    mDevice.close();
    openni::OpenNI::shutdown();
}
Example 3
bool convertDepthPointToColor(int depthX, int depthY, openni::DepthPixel depthZ, int* pColorX, int* pColorY)
{
	if (!g_depthStream.isValid() || !g_colorStream.isValid())
		return false;

	return (openni::STATUS_OK == openni::CoordinateConverter::convertDepthToColor(g_depthStream, g_colorStream, depthX, depthY, depthZ, pColorX, pColorY));
}
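A usage sketch: map the center pixel of a previously read depth frame to color coordinates. It assumes a valid g_depthFrame with densely packed rows (stride == width * sizeof(DepthPixel)):

// Look up the depth value at the frame center.
int x = g_depthFrame.getWidth() / 2;
int y = g_depthFrame.getHeight() / 2;
const openni::DepthPixel* pDepth = (const openni::DepthPixel*)g_depthFrame.getData();
openni::DepthPixel z = pDepth[y * g_depthFrame.getWidth() + x];

int colorX, colorY;
if (z != 0 && convertDepthPointToColor(x, y, z, &colorX, &colorY))
	printf("Depth (%d, %d, %u) -> color (%d, %d)\n", x, y, (unsigned)z, colorX, colorY);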
Example 4
/*
void openni::VideoMode::setResolution()
Setter function for the resolution of this VideoMode. Application use of this function is not recommended.
Instead, use SensorInfo::getSupportedVideoModes() to obtain a list of valid video modes

-- cited from OpenNI2 help. setResolution() is not recommended.
*/
bool setONI2StreamMode(openni::VideoStream& stream, int w, int h, int fps, openni::PixelFormat format){
	//std::cout << "Ask mode: " << w << "x" << h << " " << fps << " fps. format " << format << std::endl;
	const openni::Array<openni::VideoMode>& modes = stream.getSensorInfo().getSupportedVideoModes();
	for(int i = 0, i_end = modes.getSize();i < i_end;++i){
		// std::cout << "Mode: " << modes[i].getResolutionX() << "x" << modes[i].getResolutionY() << " " << modes[i].getFps() << " fps. format " << modes[i].getPixelFormat() << std::endl;
		if(modes[i].getResolutionX() != w){
			continue;
		}
		if(modes[i].getResolutionY() != h){
			continue;
		}
		if(modes[i].getFps() != fps){
			continue;
		}
		if(modes[i].getPixelFormat() != format){
			continue;
		}
		openni::Status rc = stream.setVideoMode(modes[i]);
		if(rc != openni::STATUS_OK){
			return false;
		}
		return true;
	}
	return false;
}
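For instance, requesting VGA depth at 30 fps on an already-created stream (depthStream is a placeholder name):

if (!setONI2StreamMode(depthStream, 640, 480, 30, openni::PIXEL_FORMAT_DEPTH_1_MM))
	printf("Requested depth mode is not supported by this sensor\n");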
Example 5
bool initONI2Stream(openni::Device& device, openni::SensorType sensorType, openni::VideoStream& stream, int w, int h, int fps, openni::PixelFormat format){
  openni::Status rc = openni::STATUS_OK;
  const char* strSensor;
  if(sensorType == openni::SENSOR_COLOR){
    strSensor = "openni::SENSOR_COLOR";
  }else if(sensorType == openni::SENSOR_DEPTH){
    strSensor = "openni::SENSOR_DEPTH";
  }else{
	printf("%s:Unknown SensorType -> %d\n", __FUNCTION__, sensorType);
    return false;
  }
  rc = stream.create(device, sensorType);
  if(rc != openni::STATUS_OK){
    printf("%s:Couldn't find sensor %s: %s\n", __FUNCTION__, strSensor, openni::OpenNI::getExtendedError());
    return false;
  }
  openni::VideoMode options = stream.getVideoMode();
  printf("%s:Initial resolution %s (%d, %d) FPS %d Format %d\n", __FUNCTION__, strSensor, options.getResolutionX(), options.getResolutionY(), options.getFps(), options.getPixelFormat());
  if(setONI2StreamMode(stream, w, h, fps, format) == false){
    printf("%s:Can't find desired mode in the %s\n", __FUNCTION__, strSensor);
    return false;
  }
  options = stream.getVideoMode();
  printf("  -> (%d, %d) FPS %d Format %d\n", options.getResolutionX(), options.getResolutionY(), options.getFps(), options.getPixelFormat());
  return true;
}
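Typical invocation, assuming OpenNI is initialized and the device is open; note that initONI2Stream() configures the streams but does not start them:

openni::Device device;
openni::VideoStream depth, color;
// ... after openni::OpenNI::initialize() and device.open(openni::ANY_DEVICE) ...
bool ok = initONI2Stream(device, openni::SENSOR_DEPTH, depth, 640, 480, 30,
                         openni::PIXEL_FORMAT_DEPTH_1_MM)
       && initONI2Stream(device, openni::SENSOR_COLOR, color, 640, 480, 30,
                         openni::PIXEL_FORMAT_RGB888);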
Example 6
void toggleStreamState(openni::VideoStream& stream, openni::VideoFrameRef& frame, bool& isOn, openni::SensorType type, const char* name)
{
    openni::Status nRetVal = openni::STATUS_OK;

    if (!stream.isValid())
    {
        nRetVal = stream.create(g_device, type);
        if (nRetVal != openni::STATUS_OK)
        {
            displayError("Failed to create %s stream:\n%s", name, openni::OpenNI::getExtendedError());
            return;
        }
    }

    if (isOn)
    {
        stream.stop();
        frame.release();
    }
    else
    {
        nRetVal = stream.start();
        if (nRetVal != openni::STATUS_OK)
        {
            displayError("Failed to start %s stream:\n%s", name, openni::OpenNI::getExtendedError());
            return;
        }
    }

    isOn = !isOn;
}
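A usage sketch, reusing the global stream, frame, and state-flag names that appear in the other examples:

// Toggle the depth stream on or off with one call.
toggleStreamState(g_depthStream, g_depthFrame, g_bIsDepthOn, openni::SENSOR_DEPTH, "depth");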
Example 7
bool setONI2StreamMode(openni::VideoStream& stream, int w, int h, int fps, openni::PixelFormat format){
	/*
	void openni::VideoMode::setResolution()
	Setter function for the resolution of this VideoMode. Application use of this function is not recommended.
	Instead, use SensorInfo::getSupportedVideoModes() to obtain a list of valid video modes.

	-- cited from OpenNI2 help. setResolution() is not recommended.
	*/
	const openni::Array<openni::VideoMode>& modes = stream.getSensorInfo().getSupportedVideoModes();
	for(int i = 0, i_end = modes.getSize();i < i_end;++i){
		if(modes[i].getResolutionX() != w){
			continue;
		}
		if(modes[i].getResolutionY() != h){
			continue;
		}
		if(modes[i].getPixelFormat() != format){
			continue;
		}
		openni::Status rc = stream.setVideoMode(modes[i]);
		if(rc != openni::STATUS_OK){
			printf("%s:Couldn't find RGB stream:\n%s\n", __FUNCTION__, openni::OpenNI::getExtendedError());
			return false;
		}
		return true;
	}
	return false;
}
Example 8
int openStream(openni::Device& device, const char* name, openni::SensorType sensorType, SensorOpenType openType, openni::VideoStream& stream, const openni::SensorInfo** ppSensorInfo, bool* pbIsStreamOn)
{
    *ppSensorInfo = device.getSensorInfo(sensorType);
    *pbIsStreamOn = false;

    if (openType == SENSOR_OFF)
    {
        return 0;
    }

    if (*ppSensorInfo == NULL)
    {
        if (openType == SENSOR_ON)
        {
            printf("No %s sensor available\n", name);
            return -1;
        }
        else
        {
            return 0;
        }
    }

    openni::Status nRetVal = stream.create(device, sensorType);
    if (nRetVal != openni::STATUS_OK)
    {
        if (openType == SENSOR_ON)
        {
            printf("Failed to create %s stream:\n%s\n", openni::OpenNI::getExtendedError(), name);
            return -2;
        }
        else
        {
            return 0;
        }
    }

    nRetVal = stream.start();
    if (nRetVal != openni::STATUS_OK)
    {
        stream.destroy();

        if (openType == SENSOR_ON)
        {
            printf("Failed to start depth stream:\n%s\n", openni::OpenNI::getExtendedError());
            return -3;
        }
        else
        {
            return 0;
        }
    }

    *pbIsStreamOn = true;

    return 0;
}
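A usage sketch. SensorOpenType is not defined in this example, so the enum below (including the optional "try" value implied by the fall-through returns) is an assumption:

// Assumed definition; the original enum is not shown here.
enum SensorOpenType { SENSOR_OFF, SENSOR_ON, SENSOR_TRY_ON };

openni::Device device;                              // assumed already opened
const openni::SensorInfo* pDepthSensorInfo = NULL;
bool bIsDepthOn = false;
openni::VideoStream depthStream;
int ret = openStream(device, "depth", openni::SENSOR_DEPTH, SENSOR_ON,
                     depthStream, &pDepthSensorInfo, &bIsDepthOn);
if (ret != 0 || !bIsDepthOn)
    printf("Depth stream unavailable (code %d)\n", ret);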
Example 9
void toggleImageAutoWhiteBalance(int)
{
    if (g_colorStream.getCameraSettings() == NULL)
    {
        displayError("Color stream doesn't support camera settings");
        return;
    }
    g_colorStream.getCameraSettings()->setAutoWhiteBalanceEnabled(!g_colorStream.getCameraSettings()->getAutoWhiteBalanceEnabled());
    displayMessage("Auto White balance: %s", g_colorStream.getCameraSettings()->getAutoWhiteBalanceEnabled() ? "ON" : "OFF");
}
Example 10
void KinectCamera::startcamera(void)
{
    openni::OpenNI::initialize();                          // initialize OpenNI
    mDevice.open( openni::ANY_DEVICE );                    // open the device (mDevice is declared as a global)
    mColorStream.create( mDevice, openni::SENSOR_COLOR );  // create the color stream
    mColorStream.start();                                  // start the color stream
    mDepthStream.create( mDevice, openni::SENSOR_DEPTH );  // create the depth stream
    mDepthStream.start();                                  // start the depth stream
    fig = 1;                                               // mark the camera as started
}
Example 11
void toggleCloseRange(int )
{
	bool bCloseRange = false; // default in case the property read fails
	g_depthStream.getProperty(XN_STREAM_PROPERTY_CLOSE_RANGE, &bCloseRange);

	bCloseRange = !bCloseRange;

	g_depthStream.setProperty(XN_STREAM_PROPERTY_CLOSE_RANGE, bCloseRange);

	displayMessage ("Close range: %s", bCloseRange?"On":"Off");	
}
Example 12
void toggleCMOSAutoLoops(int )
{
	if (g_colorStream.getCameraSettings() == NULL)
	{
		displayMessage("Color stream doesn't support camera settings");
		return;
	}
	toggleImageAutoExposure(0);
	toggleImageAutoWhiteBalance(0);

	displayMessage ("CMOS Auto Loops: %s", g_colorStream.getCameraSettings()->getAutoExposureEnabled()?"On":"Off");	
}
Example 13
openni::Status HandGesture::Init(int argc, char **argv)
{
	openni::OpenNI::initialize();

	const char* deviceUri = openni::ANY_DEVICE;
	for (int i = 1; i < argc-1; ++i)
	{
		if (strcmp(argv[i], "-device") == 0)
		{
			deviceUri = argv[i+1];
			break;
		}
	}

	openni::Status rc = m_device.open(deviceUri);
	if (rc != openni::STATUS_OK)
	{
		printf("Open Device failed:\n%s\n", openni::OpenNI::getExtendedError());
		return rc;
	}

	nite::NiTE::initialize();

	if (m_pHandTracker->create(&m_device) != nite::STATUS_OK)
	{
		return openni::STATUS_ERROR;
	}


	rc = m_depthStream.create(m_device, openni::SENSOR_DEPTH);
	if (rc == openni::STATUS_OK)
	{
		rc = m_depthStream.start();
		if (rc != openni::STATUS_OK)
		{
			printf("SimpleViewer: Couldn't start depth stream:\n%s\n", openni::OpenNI::getExtendedError());
			m_depthStream.destroy();
		}
	}
	else
	{
		printf("SimpleViewer: Couldn't find depth stream:\n%s\n", openni::OpenNI::getExtendedError());
	}


	m_pHandTracker->startGestureDetection(nite::GESTURE_WAVE);
	m_pHandTracker->startGestureDetection(nite::GESTURE_CLICK);
//	m_pHandTracker->startGestureDetection(nite::GESTURE_HAND_RAISE);

	return InitOpenCV(argc, argv);

}
Example 14
void changeImageExposure(int delta)
{
    if (g_colorStream.getCameraSettings() == NULL)
    {
        displayError("Color stream doesn't support camera settings");
        return;
    }
    int exposure = g_colorStream.getCameraSettings()->getExposure();
    openni::Status rc = g_colorStream.getCameraSettings()->setExposure(exposure + delta);
    if (rc != openni::STATUS_OK)
    {
        displayMessage("Can't change exposure");
        return;
    }
    displayMessage("Changed exposure to: %d", g_colorStream.getCameraSettings()->getExposure());
}
Example 15
void ColorListener::onNewFrame(openni::VideoStream& vs)
{
    openni::VideoFrameRef frame;
    vs.readFrame(&frame);
    frames->push_back(frame);
    if(isUpdate) w->update();
}
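Callbacks like onNewFrame() fire only after the listener is registered with the stream. A minimal sketch; ColorListener's constructor is not shown above, so default construction is an assumption:

ColorListener listener;                      // construction details depend on the class's definition
colorStream.addNewFrameListener(&listener);  // register before starting the stream
colorStream.start();
// ... frames now arrive through onNewFrame() ...
colorStream.stop();
colorStream.removeNewFrameListener(&listener);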
Example 16
void setDepthVideoMode(int mode)
{
	bool bIsStreamOn = g_bIsDepthOn;
	if (bIsStreamOn)
	{
		g_bIsDepthOn = false;
		g_depthStream.stop();
	}

	g_depthStream.setVideoMode(g_depthSensorInfo->getSupportedVideoModes()[mode]);
	if (bIsStreamOn)
	{
		g_depthStream.start();
		g_bIsDepthOn = true;
	}
}
Example 17
void changeImageGain(int delta)
{
    if (g_colorStream.getCameraSettings() == NULL)
    {
        displayError("Color stream doesn't support camera settings");
        return;
    }
    int gain = g_colorStream.getCameraSettings()->getGain();
    openni::Status rc = g_colorStream.getCameraSettings()->setGain(gain + delta);
    if (rc != openni::STATUS_OK)
    {
        displayMessage("Can't change gain");
        return;
    }
    displayMessage("Changed gain to: %d", g_colorStream.getCameraSettings()->getGain());
}
Example 18
void setIRVideoMode(int mode)
{
	bool bIsStreamOn = g_bIsIROn;
	if (bIsStreamOn)
	{
		g_bIsIROn = false;
		g_irStream.stop();
	}

	g_irFrame.release();
	g_irStream.setVideoMode(g_irSensorInfo->getSupportedVideoModes()[mode]);
	if (bIsStreamOn)
	{
		g_irStream.start();
		g_bIsIROn = true;
	}
}
Example 19
void toggleMirror(int )
{
	toggleDepthMirror(0);
	toggleColorMirror(0);
	toggleIRMirror(0);

	displayMessage ("Mirror: %s", g_depthStream.getMirroringEnabled()?"On":"Off");	
}
Example 20
void streamFrameListener::onNewFrame(openni::VideoStream& stream)
{
    LockGuard guard(mutex);
    stream.readFrame(&frameRef);

    if (!frameRef.isValid() || !frameRef.getData())
    {
        yInfo() << "frame lost";
        return;
    }

    int pixC;

    pixF     = stream.getVideoMode().getPixelFormat();
    pixC     = depthCameraDriver::pixFormatToCode(pixF);
    w        = frameRef.getWidth();
    h        = frameRef.getHeight();
    dataSize = frameRef.getDataSize();

    if (isReady == false)
    {
        isReady = true;
    }

    if(pixC == VOCAB_PIXEL_INVALID)
    {
        yError() << "depthCameraDriver: Pixel Format not recognized";
        return;
    }

    image.setPixelCode(pixC);
    image.resize(w, h);

    if(image.getRawImageSize() != frameRef.getDataSize())
    {
        yError() << "depthCameraDriver:device and local copy data size doesn't match";
        return;
    }

    memcpy((void*)image.getRawImage(), (void*)frameRef.getData(), frameRef.getDataSize());
    stamp.update();
    return;
}
Example 21
void pcl::io::OpenNI2Grabber::processIRFrame (openni::VideoStream& stream)
{
  openni::VideoFrameRef frame;
  stream.readFrame (&frame);

  FrameWrapper::Ptr frameWrapper = boost::make_shared<Openni2FrameWrapper>(frame);

  boost::shared_ptr<IRImage> image = boost::make_shared<IRImage> ( frameWrapper );

  irCallback (image, NULL);
}
Example 22
void setStreamCropping(openni::VideoStream& stream, int originX, int originY, int width, int height)
{
	if (!stream.isValid())
	{
		displayMessage("Stream does not exist!");
		return;
	}
	
	if (!stream.isCroppingSupported())
	{
		displayMessage("Stream does not support cropping!");
		return;
	}
	
	openni::Status nRetVal = stream.setCropping(originX, originY, width, height);
	if (nRetVal != openni::STATUS_OK)
	{
		displayMessage("Failed to set cropping: %s", xnGetStatusString(nRetVal));
		return;
	}
}
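For example, cropping a 320x240 window out of the center of a 640x480 mode (the offsets assume that resolution):

setStreamCropping(g_depthStream, 160, 120, 320, 240);  // center crop for 640x480
// The full field of view can be restored later with resetCropping() (see Example 24).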
Example 23
void ColorStreamListener::onNewFrame(openni::VideoStream& stream){
	//Log::i( TAG, "onNewFrame");
	stream.readFrame(&(this->Frame));
	if (Frame.isValid()){
		if ( openni::SENSOR_COLOR == Frame.getSensorType() ){
			//cv::Mat mColorMat_BGR;
			// Wraps the frame buffer without copying; the Mat is only valid while Frame is.
			this->colorMat = new cv::Mat(Frame.getHeight(),Frame.getWidth(),CV_8UC3,(void*)Frame.getData());
			//cv::cvtColor(mColorMat,mColorMat_BGR,CV_RGB2BGR);
			this->mColorDevice->setData(*(this->colorMat));
		}/* End of if */
	}/* End of if */
}/* End of onNewFrame */
Example 24
void resetStreamCropping(openni::VideoStream& stream)
{
	if (!stream.isValid())
	{
		displayMessage("Stream does not exist!");
		return;
	}
	
	if (!stream.isCroppingSupported())
	{
		displayMessage("Stream does not support cropping!");
		return;
	}
	
	openni::Status nRetVal = stream.resetCropping();
	if (nRetVal != openni::STATUS_OK)
	{
		displayMessage("Failed to reset cropping: %s", xnGetStatusString(nRetVal));
		return;
	}
}
Example 25
void toggleCloseRange(int )
{
    static OniBool bCloseRange = FALSE;

    if (g_depthStream.getProperty(XN_STREAM_PROPERTY_CLOSE_RANGE, &bCloseRange) != XN_STATUS_OK &&
            g_depthStream.getProperty(KINECT_DEPTH_PROPERTY_CLOSE_RANGE, &bCloseRange) != XN_STATUS_OK)
    {
        // Continue with the latest value even in case of error
    }

    bCloseRange = !bCloseRange;

    if (g_depthStream.setProperty(XN_STREAM_PROPERTY_CLOSE_RANGE, bCloseRange) != XN_STATUS_OK &&
            g_depthStream.setProperty(KINECT_DEPTH_PROPERTY_CLOSE_RANGE, bCloseRange) != XN_STATUS_OK)
    {
        displayError("Couldn't set the close range");
        return;
    }

    displayMessage ("Close range: %s", bCloseRange?"On":"Off");
}
Example 26
bool initONI2RGBStream(openni::Device& device, openni::VideoStream& rgb, int w, int h, int fps, openni::PixelFormat format){
	openni::Status rc = openni::STATUS_OK;
	rc = rgb.create(device, openni::SENSOR_COLOR);
	if(rc != openni::STATUS_OK){
		printf("%s:Couldn't find RGB stream:\n%s\n", __FUNCTION__, openni::OpenNI::getExtendedError());
		return false;
	}
	rc = rgb.setMirroringEnabled(false);
	if (rc != openni::STATUS_OK){
		printf("%s:setMirroringEnabled(false) failed:\n%s\n", __FUNCTION__, openni::OpenNI::getExtendedError());
		return false;
	}
	openni::VideoMode options = rgb.getVideoMode();
	printf("Initial resolution RGB (%d, %d) FPS %d Format %d\n", options.getResolutionX(), options.getResolutionY(), options.getFps(), options.getPixelFormat());
	if(setONI2StreamMode(rgb, w, h, fps, format) == false){
		printf("%s:Can't find desired rgb mode\n", __FUNCTION__ );
		return false;
	}
	options = rgb.getVideoMode();
	printf("  -> (%d, %d) FPS %d Format %d\n", options.getResolutionX(), options.getResolutionY(), options.getFps(), options.getPixelFormat());
	rc = rgb.start();
	if (rc != openni::STATUS_OK){
		printf("%s:Couldn't start RGB stream:\n%s\n", __FUNCTION__, openni::OpenNI::getExtendedError());
		rgb.destroy();
		return false;
	}
	return true;
}
Example 27
void KinectCamera::paint(QPainter *painter)
{
    if (!fig)  // if the device is not open yet, start the camera first
    {
        startcamera();
    }

    if (m_streamsource == "depth")
    {
        int iMaxDepth = mDepthStream.getMaxPixelValue();
        mDepthStream.readFrame( &mDepthFrame );
        // Wrap the 16-bit depth buffer and scale it into an 8-bit grayscale image.
        const cv::Mat mImageDepth(
                    mDepthFrame.getHeight(), mDepthFrame.getWidth(),
                    CV_16UC1, (void*)mDepthFrame.getData() );
        cv::Mat mScaledDepth;
        mImageDepth.convertTo( mScaledDepth, CV_8U, 255.0 / iMaxDepth );
        QVector<QRgb> colorTable;
        for (int k = 0; k < 256; ++k)
        {
            colorTable.push_back( qRgb(k, k, k) );
        }
        KinectDepthImage = QImage((const unsigned char*)mScaledDepth.data, mDepthFrame.getWidth(), mDepthFrame.getHeight(), QImage::Format_Indexed8);
        KinectDepthImage.setColorTable(colorTable);
        painter->drawImage(boundingRect().adjusted(1, 1, -1, -1), KinectDepthImage);
    }
    else
    {
        mColorStream.readFrame( &mColorFrame );
        KinectColorImage = QImage((const unsigned char*)mColorFrame.getData(), mColorFrame.getWidth(), mColorFrame.getHeight(), QImage::Format_RGB888);
        painter->drawImage(boundingRect().adjusted(1, 1, -1, -1), KinectColorImage);
    }
}
Example 28
bool record_oni(char *tmpfile, int bufsize, openni::VideoStream &depth, openni::VideoStream &color, Config &conf) {
    openni::Recorder recorder;

    time_t t = time(NULL);
    strftime(tmpfile, bufsize, "rgbd_%Y%m%d_%H-%M-%S_", localtime(&t));
    // Append hostname and extension, leaving room for the terminating NUL
    // (strncat's size argument is the remaining capacity, not the buffer size).
    const char *host = getenv("HOSTNAME");
    strncat(tmpfile, host ? host : "unknown-host", bufsize - strlen(tmpfile) - 1);
    strncat(tmpfile, ".oni", bufsize - strlen(tmpfile) - 1);
    printf("Starting ONI Capture.\n");
    depth.start();
    color.start();
    openni::Status rc = recorder.create(tmpfile);
    if(rc != openni::STATUS_OK) {
        printf("Error: Failed to open '%s' for writing!\n%s", tmpfile, openni::OpenNI::getExtendedError());
        return false;
    }

    recorder.attach(color);
    recorder.attach(depth);
    recorder.start();

    struct timespec	start, tp;
    clock_gettime(CLOCK_MONOTONIC, &start);

    long tt;
    do {
        usleep(100);
        clock_gettime(CLOCK_MONOTONIC, &tp);
        tt = (tp.tv_sec-start.tv_sec)*1000+(tp.tv_nsec-start.tv_nsec)/1000000;
    } while(tt < conf.capture_time);


    recorder.stop();
    color.stop();
    depth.stop();
    recorder.destroy();

    printf("Captured ONI to '%s'\n", tmpfile);

    return true;
}
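A usage sketch, assuming created-but-stopped depth and color streams (record_oni() starts and stops them itself) and a Config whose capture_time is in milliseconds, as the timing loop implies:

char oniPath[256];
Config conf;
conf.capture_time = 10000;  // record for 10 seconds
if (record_oni(oniPath, (int)sizeof(oniPath), depth, color, conf))
    printf("Recording saved to %s\n", oniPath);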
Example 29
void toggleIRState(int )
{
	if (g_irStream.isValid()) 
	{
		if(g_bIsIROn)
		{
			g_irStream.stop();
			g_irFrame.release();
		}
		else
		{
			openni::Status nRetVal = g_irStream.start();
			if (nRetVal != openni::STATUS_OK)
			{
				displayError("Failed to start IR stream:\n%s", openni::OpenNI::getExtendedError());
				return;
			}
		}

		g_bIsIROn = !g_bIsIROn;
	}
}
Example 30
void OpenNI2Interface::printModes(const openni::VideoStream& stream,const openni::VideoMode& requestedMode)
{
    const auto& modes = stream.getSensorInfo().getSupportedVideoModes();

    std::cout << "Requested mode:\n";
    printMode(requestedMode);
    
    std::cout << "Supported modes:\n";
    for(int i = 0; i < modes.getSize(); i++)
    {
        printMode(modes[i]);
    }
}