// Example #1 (score: 0)
// File: test.cpp — Project: EduFdez/mrpt
/**
 * @brief Creates, configures and starts the RGB (color) stream of an OpenNI2 device.
 *
 * Disables mirroring, selects the requested video mode via setONI2StreamMode()
 * and starts the stream. Progress and the active video mode are logged to stdout.
 *
 * @param device  Opened OpenNI2 device to create the stream on.
 * @param rgb     Stream object to initialize (left destroyed on failure).
 * @param w       Desired horizontal resolution in pixels.
 * @param h       Desired vertical resolution in pixels.
 * @param fps     Desired frame rate.
 * @param format  Desired pixel format.
 * @return true on success; false on any failure (details printed via
 *         openni::OpenNI::getExtendedError()).
 */
bool initONI2RGBStream(openni::Device& device, openni::VideoStream& rgb, int w, int h, int fps, openni::PixelFormat format){
	openni::Status rc = openni::STATUS_OK;
	rc = rgb.create(device, openni::SENSOR_COLOR);
	if(rc != openni::STATUS_OK){
		printf("%s:Couldn't find RGB stream:\n%s\n", __FUNCTION__, openni::OpenNI::getExtendedError());
		return false;
	}
	rc = rgb.setMirroringEnabled(false);
	if (rc != openni::STATUS_OK){
		printf("%s:setMirroringEnabled(false) failed:\n%s\n", __FUNCTION__, openni::OpenNI::getExtendedError());
		// Release the stream created above so the caller is not left with a
		// half-configured stream (matches the start-failure path below).
		rgb.destroy();
		return false;
	}
	openni::VideoMode options = rgb.getVideoMode();
	printf("Initial resolution RGB (%d, %d) FPS %d Format %d\n", options.getResolutionX(), options.getResolutionY(), options.getFps(), options.getPixelFormat());
	if(setONI2StreamMode(rgb, w, h, fps, format) == false){
		printf("%s:Can't find desired rgb mode\n", __FUNCTION__ );
		rgb.destroy();
		return false;
	}
	options = rgb.getVideoMode();
	printf("  -> (%d, %d) FPS %d Format %d\n", options.getResolutionX(), options.getResolutionY(), options.getFps(), options.getPixelFormat());
	rc = rgb.start();
	if (rc != openni::STATUS_OK){
		printf("%s:Couldn't start RGB stream:\n%s\n", __FUNCTION__, openni::OpenNI::getExtendedError());
		rgb.destroy();
		return false;
	}
	return true;
}
// Example #2 (score: 0)
// File: test.cpp — Project: GYengera/mrpt
/**
 * @brief Creates a color or depth stream on an OpenNI2 device and applies the
 *        requested video mode. Note: the stream is configured but NOT started.
 *
 * @param device     Opened OpenNI2 device.
 * @param sensorType Either openni::SENSOR_COLOR or openni::SENSOR_DEPTH;
 *                   anything else is rejected.
 * @param stream     Stream object to create and configure.
 * @param w,h,fps    Desired resolution and frame rate.
 * @param format     Desired pixel format.
 * @return true on success; false otherwise (diagnostics go to stdout).
 */
bool initONI2Stream(openni::Device& device, openni::SensorType sensorType, openni::VideoStream& stream, int w, int h, int fps, openni::PixelFormat format){
  // Human-readable sensor name used in every log line below.
  const char* sensorName;
  switch(sensorType){
  case openni::SENSOR_COLOR:
    sensorName = "openni::SENSOR_COLOR";
    break;
  case openni::SENSOR_DEPTH:
    sensorName = "openni::SENSOR_DEPTH";
    break;
  default:
    printf("%s:Unknown SensorType -> %d\n", __FUNCTION__, sensorType);
    return false;
  }
  const openni::Status status = stream.create(device, sensorType);
  if(status != openni::STATUS_OK){
    printf("%s:Couldn't find sensor %s: %s\n", __FUNCTION__, sensorName, openni::OpenNI::getExtendedError());
    return false;
  }
  openni::VideoMode mode = stream.getVideoMode();
  printf("%s:Initial resolution %s (%d, %d) FPS %d Format %d\n", __FUNCTION__, sensorName, mode.getResolutionX(), mode.getResolutionY(), mode.getFps(), mode.getPixelFormat());
  if(!setONI2StreamMode(stream, w, h, fps, format)){
    printf("%s:Can't find desired mode in the %s\n", __FUNCTION__, sensorName);
    return false;
  }
  // Re-read the mode so the log reflects what was actually applied.
  mode = stream.getVideoMode();
  printf("  -> (%d, %d) FPS %d Format %d\n", mode.getResolutionX(), mode.getResolutionY(), mode.getFps(), mode.getPixelFormat());
  return true;
}
// Example #3 (score: 0)
// File: test.cpp — Project: EduFdez/mrpt
/**
 * @brief Creates, configures and starts the depth stream of an OpenNI2 device.
 *
 * Mirrors the structure of initONI2RGBStream(): create, select video mode,
 * disable mirroring (now with its status checked — it was previously ignored),
 * then start the stream.
 *
 * @param device  Opened OpenNI2 device to create the stream on.
 * @param depth   Stream object to initialize (left destroyed on failure).
 * @param w       Desired horizontal resolution in pixels.
 * @param h       Desired vertical resolution in pixels.
 * @param fps     Desired frame rate.
 * @param format  Desired pixel format.
 * @return true on success; false on any failure (details printed via
 *         openni::OpenNI::getExtendedError()).
 */
bool initONI2DepthStream(openni::Device& device, openni::VideoStream& depth, int w, int h, int fps, openni::PixelFormat format){
	openni::Status rc = depth.create(device, openni::SENSOR_DEPTH);
	if (rc != openni::STATUS_OK){
		printf("%s:Couldn't find depth stream:\n%s\n", __FUNCTION__, openni::OpenNI::getExtendedError());
		return false;
	}
	openni::VideoMode options = depth.getVideoMode();
	printf("Initial resolution Depth(%d, %d) FPS %d Format %d\n", options.getResolutionX(), options.getResolutionY(), options.getFps(), options.getPixelFormat());
	if(setONI2StreamMode(depth, w, h, fps, format) == false){
		printf("%s:Can't find desired depth mode\n", __FUNCTION__ );
		// Release the stream created above so the caller is not left with a
		// half-configured stream (matches the start-failure path below).
		depth.destroy();
		return false;
	}
	options = depth.getVideoMode();
	printf("  -> (%d, %d) FPS %d Format %d\n", options.getResolutionX(), options.getResolutionY(), options.getFps(), options.getPixelFormat());
	// The return status was previously discarded; check it like the RGB
	// initializer does so a mirroring failure is not silently ignored.
	rc = depth.setMirroringEnabled(false);
	if (rc != openni::STATUS_OK){
		printf("%s:setMirroringEnabled(false) failed:\n%s\n", __FUNCTION__, openni::OpenNI::getExtendedError());
		depth.destroy();
		return false;
	}
	rc = depth.start();
	if (rc != openni::STATUS_OK){
		printf("%s:Couldn't start depth stream:\n%s\n", __FUNCTION__, openni::OpenNI::getExtendedError());
		depth.destroy();
		return false;
	}
	return true;
}
// Example #4 (score: 0)
/// Frame callback: copies the newly arrived frame into the local image buffer
/// under the listener mutex, updating cached format/size members and the
/// timestamp. Frames with no data, an unrecognized pixel format, or a size
/// mismatch with the local copy are dropped with a log message.
void streamFrameListener::onNewFrame(openni::VideoStream& stream)
{
    LockGuard guard(mutex);
    stream.readFrame(&frameRef);

    const bool hasData = frameRef.isValid() && frameRef.getData();
    if (!hasData)
    {
        yInfo() << "frame lost";
        return;
    }

    // Cache the frame's format and geometry before any validation, so these
    // members are updated even when the frame itself ends up being dropped.
    pixF     = stream.getVideoMode().getPixelFormat();
    w        = frameRef.getWidth();
    h        = frameRef.getHeight();
    dataSize = frameRef.getDataSize();
    isReady  = true;

    const int pixC = depthCameraDriver::pixFormatToCode(pixF);
    if (pixC == VOCAB_PIXEL_INVALID)
    {
        yError() << "depthCameraDriver: Pixel Format not recognized";
        return;
    }

    image.setPixelCode(pixC);
    image.resize(w, h);

    // Guard the memcpy below: the local buffer must match the device frame.
    if (image.getRawImageSize() != frameRef.getDataSize())
    {
        yError() << "depthCameraDriver:device and local copy data size doesn't match";
        return;
    }

    memcpy((void*)image.getRawImage(), (void*)frameRef.getData(), frameRef.getDataSize());
    stamp.update();
}
// Example #5 (score: 0)
// File: Draw.cpp — Project: 1170390/OpenNI2
// Maps an on-screen selection rectangle (in `location` coordinates) into the
// stream's pixel coordinates and applies it as the cropping region.
// The X origin is rounded down and the width rounded up to multiples of
// `dividedBy`; selections not fully contained in `location` are ignored,
// as are streams without cropping support.
void drawCropStream(openni::VideoStream& stream, IntRect location, IntRect selection, int dividedBy)
{
	if (!stream.isCroppingSupported())
	{
		return;
	}

	// Bail out unless the entire selection lies inside the drawing location.
	const bool contained =
		selection.uLeft   >= location.uLeft   &&
		selection.uRight  <= location.uRight  &&
		selection.uBottom >= location.uBottom &&
		selection.uTop    <= location.uTop;
	if (!contained)
	{
		return;
	}

	const openni::VideoMode videoMode = stream.getVideoMode();
	const int resX = videoMode.getResolutionX();
	const int resY = videoMode.getResolutionY();
	const int locW = location.uRight - location.uLeft;
	const int locH = location.uTop   - location.uBottom;

	// Scale each selection edge from screen space into stream pixels.
	int originX = resX * (selection.uLeft   - location.uLeft)   / locW;
	int originY = resY * (selection.uBottom - location.uBottom) / locH;
	int width   = resX * (selection.uRight  - location.uLeft)   / locW - originX;
	int height  = resY * (selection.uTop    - location.uBottom) / locH - originY;

	// Align X extent to the required granularity: origin down, width up.
	if ((originX % dividedBy) != 0)
		originX -= (originX % dividedBy);
	if ((width % dividedBy) != 0)
		width += dividedBy - (width % dividedBy);

	setStreamCropping(stream, originX, originY, width, height);
}
// Example #6 (score: 0)
/// @brief Initializes the OpenNI runtime, opens the first available device and
///        starts its depth and color streams, then tries to apply the
///        resolution/FPS requested by `camera`.
/// @return true on success.
/// NOTE(review): every failure path calls exit(0) rather than returning false,
/// so the int return is effectively always true — confirm this is intended.
int SensorOpenNI::initialize()
{
    LOG(INFO) << "Initializing OpenNI";
    ///< force shutdown before starting!!
    kinect::OpenNI::shutdown();

    kinect::Status rc;
    rc = kinect::STATUS_OK;

    /// Fetch the device URI to pass to Device::open()
    const char* deviceURI = kinect::ANY_DEVICE;

    /// Initialize the device
    rc = kinect::OpenNI::initialize();
    if(rc!=kinect::STATUS_OK)
    {
        mDebug()<<"Initialization Errors (if any): "<< kinect::OpenNI::getExtendedError();
        kinect::OpenNI::shutdown();
        exit(0);
    }

    /// Open the device using the previously fetched device URI
    rc = device.open(deviceURI);
    if (rc != kinect::STATUS_OK)
    {
        mDebug()<<"Device open failed: "<<kinect::OpenNI::getExtendedError();
        kinect::OpenNI::shutdown();
        exit(0);
    }

    /// Create the depth stream
    rc = g_depthStream.create(device, kinect::SENSOR_DEPTH);
    if (rc == kinect::STATUS_OK)
    {
        /// start the depth stream, if its creation was successful
        rc = g_depthStream.start();

        if (rc != kinect::STATUS_OK)
        {
            mDebug()<<"Couldn't start depth stream: "<<kinect::OpenNI::getExtendedError();
            g_depthStream.destroy();
            exit(0);
        }
    }
    else
    {
        mDebug()<<"Couldn't find depth stream: "<<kinect::OpenNI::getExtendedError();
        exit(0);
    }

    if (!g_depthStream.isValid())
    {
        mDebug()<<"No valid depth streams. Exiting";
        kinect::OpenNI::shutdown();
        exit(0);
    }

    /// Create the color stream
    rc = g_colorStream.create(device, kinect::SENSOR_COLOR);

    if (rc == kinect::STATUS_OK)
    {
        /// start the color stream, if its creation was successful
        rc = g_colorStream.start();

        if (rc != kinect::STATUS_OK)
        {
            mDebug()<<"Couldn't start color stream: "<<kinect::OpenNI::getExtendedError();
            g_colorStream.destroy();
            exit(0);
        }
    }
    else
    {
        mDebug()<<"Couldn't find color stream: "<<kinect::OpenNI::getExtendedError();
        exit(0);
    }

    if (!g_colorStream.isValid())
    {
        mDebug()<<"No valid color streams. Exiting";
        kinect::OpenNI::shutdown();
        exit(0);
    }

    /// Configure resolutions
    /// NOTE(review): a failed setVideoMode only logs to stderr; the streams
    /// keep running at their previous mode — confirm that is acceptable.
    {
        /// Attempt to set for depth
        {
            kinect::VideoMode mode = g_depthStream.getVideoMode();
            if(((int)camera->FPS())==60)
                mode.setFps(60);
            else
                mode.setFps(30);
            mode.setResolution(camera->width(), camera->height());
            rc = g_depthStream.setVideoMode(mode);
            if (rc != kinect::STATUS_OK)
                std::cerr << "error setting video mode (depth)" << std::endl;
        }
        /// Attempt to set for color
        {
            kinect::VideoMode mode = g_colorStream.getVideoMode();
            if(((int)camera->FPS())==60)
                mode.setFps(60);
            else
                mode.setFps(30);
            mode.setFps(30); ///< @todo check!!! (overrides the 60 FPS branch above)
            mode.setResolution(camera->width(), camera->height());
            rc = g_colorStream.setVideoMode(mode);
            if (rc != kinect::STATUS_OK)
                std::cerr << "error setting video mode (color)" << std::endl;
        }
    }


#ifdef THIS_CAUSES_INIT_STALLS
    /// Enable depth/color frame synchronization
    rc = device.setDepthColorSyncEnabled(true);
    if (rc != kinect::STATUS_OK)
    {
        qDebug()<<"Could not synchronise device";
        // VGA Kinect always seems to shut down here
        kinect::OpenNI::shutdown();
        exit(0);
    }
#endif

    /// Camera settings
    /// NOTE(review): getCameraSettings() may return null when the stream
    /// exposes no camera settings — the pointer is dereferenced unchecked;
    /// TODO confirm against the OpenNI2 documentation for this device.
    kinect::CameraSettings* settings = g_colorStream.getCameraSettings();
    settings->setAutoExposureEnabled(true);
    settings->setAutoWhiteBalanceEnabled(true);

    /// Fetch the camera intrinsics
    /// NOTE(review): the disabled region below appears corrupted — class member
    /// declarations and an unrelated text fragment are interleaved with the
    /// intrinsics computation. Kept byte-identical (it is dead code under
    /// `#if 0`); reconcile against the upstream source before enabling.
#if 0
        float w = g_depthStream.getVideoMode().getResolutionX();protected:
示例#7
0
        float w = g_depthStream.getVideoMode().getResolutionX();protected:
        Camera*const camera;
        /// Device
        kinect::Device device;
        bool initialized;

        /// Streams
        kinect::VideoStream g_depthStream;
        kinect::VideoStream g_colorStream;

        /// Frames
        kinect::VideoFrameRef g_depthFrame;
        kinect::VideoFrameRef g_colorFrame;
        float fov_h = g_depthStream.getHorizontalFieldOfView();
        float fl_h = .5*w / tan(.5*fov_h);
        float h = g_depthStream.getVideoMode().getResolutionY();
        float fov_v = g_depthStream.getVerticalFieldOfView();
        float fl_v = .5*h / tan(.5*fov_v);
        std::cout << "cameras focal lengths" << fl_h << fl_v;
#endif

    initialized = true;
    return true;
}

SensorOpenNI::~SensorOpenNI()
{
    if(initialized){
        LOG(INFO) << "Shutting down Kinect...";
        flush(std::cout);
        g_depthStream.destroy();
// Example #8 (score: 0)
 int                getFrameWidth() const{ return m_stream.getVideoMode().getResolutionX(); }