Example #1
    bool buildFrame(libfreenect2::Frame* lf2Frame)
    {
      if (!running)
        return false;

      OniFrame* oniFrame = getServices().acquireFrame();
      oniFrame->frameIndex = frame_id++;
      oniFrame->timestamp = lf2Frame->timestamp;
      oniFrame->videoMode = video_mode;
      oniFrame->width = video_mode.resolutionX;
      oniFrame->height = video_mode.resolutionY;

      if (cropping.enabled)
      {
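        // Report the crop window size and origin in the frame metadata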
        oniFrame->height = cropping.height;
        oniFrame->width = cropping.width;
        oniFrame->cropOriginX = cropping.originX;
        oniFrame->cropOriginY = cropping.originY;
        oniFrame->croppingEnabled = true;
      }
      else
      {
        oniFrame->cropOriginX = 0;
        oniFrame->cropOriginY = 0;
        oniFrame->croppingEnabled = false;
      }
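      // Clamp the copy size to what the source frame actually provides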
      int width = std::min(oniFrame->width, (int)lf2Frame->width);
      int height = std::min(oniFrame->height, (int)lf2Frame->height);

      populateFrame(lf2Frame, oniFrame->cropOriginX, oniFrame->cropOriginY, oniFrame, 0, 0, width, height);
      raiseNewFrame(oniFrame);
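      // Release the driver's reference now that the frame has been delivered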
      getServices().releaseFrame(oniFrame);

      return true; // frame was successfully built and raised
    }
Example #2
	void Mainloop()
	{
		int frameId = 1;
		int xdir = 1;
		int ydir = 1;
		struct {int x, y;} center = {0,0};
		while (m_running)
		{
//			printf("Tick");
			OniFrame* pFrame = getServices().acquireFrame();

			if (pFrame == NULL) {printf("Didn't get frame...\n"); continue;}

			// Fill frame
			xnOSMemSet(pFrame->data, 0, pFrame->dataSize);

			OniDepthPixel* pDepth = (OniDepthPixel*)pFrame->data;

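			// Draw a small circular blob of synthetic depth values around the moving center point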
			for (int y1 = XN_MAX(center.y-10, 0); y1 < XN_MIN(center.y+10, OZ_RESOLUTION_Y); ++y1)
				for (int x1 = XN_MAX(center.x-10, 0); x1 < XN_MIN(center.x+10, OZ_RESOLUTION_X); ++x1)
					if ((x1-center.x)*(x1-center.x)+(y1-center.y)*(y1-center.y) < 70)
						pDepth[singleRes(x1, y1)] = OniDepthPixel(1000+(x1-y1)*3);

//			pDepth[singleRes(center.x, center.y)] = 1000;

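			// Move the blob and reverse direction when it reaches a frame edge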
			center.x += xdir;
			center.y += ydir;

			if (center.x < abs(xdir) || center.x > OZ_RESOLUTION_X-1-abs(xdir)) xdir*=-1;
			if (center.y < abs(ydir) || center.y > OZ_RESOLUTION_Y-1-abs(ydir)) ydir*=-1;

			// Fill the first row with a constant 2000mm depth as a reference line
			for (int i = 0; i < OZ_RESOLUTION_X; ++i) pDepth[i] = 2000;

			// Fill metadata
			pFrame->frameIndex = frameId;

			pFrame->videoMode.pixelFormat = ONI_PIXEL_FORMAT_DEPTH_1_MM;
			pFrame->videoMode.resolutionX = OZ_RESOLUTION_X;
			pFrame->videoMode.resolutionY = OZ_RESOLUTION_Y;
			pFrame->videoMode.fps = 30;

			pFrame->width = OZ_RESOLUTION_X;
			pFrame->height = OZ_RESOLUTION_Y;

			pFrame->cropOriginX = pFrame->cropOriginY = 0;
			pFrame->croppingEnabled = FALSE;

			pFrame->sensorType = ONI_SENSOR_DEPTH;
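			// Stride is the number of bytes per row of pixel data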
			pFrame->stride = OZ_RESOLUTION_X*sizeof(OniDepthPixel);
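			// OpenNI timestamps are in microseconds; 33000us per frame matches 30 fps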
			pFrame->timestamp = frameId*33000;

			raiseNewFrame(pFrame);
			getServices().releaseFrame(pFrame);

			frameId++;

			xnOSSleep(33);
		}
	}
Example #3
    void buildFrame(void* data, uint32_t timestamp)
    {
      if (!running)
        return;

      OniFrame* frame = getServices().acquireFrame();
      frame->frameIndex = frame_id++;
      frame->videoMode = video_mode;
      frame->width = video_mode.resolutionX;
      frame->height = video_mode.resolutionY;

      // Handle overflow: the input timestamp comes from a 60 MHz clock, so its
      // 32-bit counter wraps around roughly every 70 seconds.
      if (timestamp < prev_timestamp)
      {
        // Unsigned 32-bit subtraction wraps around as well, so this yields the
        // number of ticks elapsed since the previous frame even across an overflow.
        uint32_t prev_int = static_cast<uint32_t>(prev_timestamp);
        uint64_t temp_delta = timestamp - prev_int;
        prev_timestamp += temp_delta;
      }
      else
      {
        prev_timestamp = timestamp;
      }

      // OpenNI wants the value in microseconds
      frame->timestamp = prev_timestamp / 60;

      populateFrame(data, frame);
      raiseNewFrame(frame);
      getServices().releaseFrame(frame);
    }
Example #4
	void Mainloop()
	{
		m_running = true;

		while (m_running)
		{
			OniFrame* pFrame = getServices().acquireFrame();
			BuildFrame(pFrame);
			raiseNewFrame(pFrame);
			// Release the frame once it has been raised, matching acquireFrame() above
			getServices().releaseFrame(pFrame);
		}
	}
Example #5
    void buildFrame(void* data, uint32_t timestamp) {
      if (!running)
        return;     

      OniFrame* frame = getServices().acquireFrame();
      frame->frameIndex = frame_id++;
      frame->timestamp = timestamp;
      frame->videoMode = video_mode;
      frame->width = video_mode.resolutionX;
      frame->height = video_mode.resolutionY;
      
      populateFrame(data, frame);
      raiseNewFrame(frame);
      getServices().releaseFrame(frame);
    }
Example #6
	void Mainloop()
	{
		int frameId = 1;
		while (m_running)
		{
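			// Wait until new data is signaled, then reset the event for the next iteration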
			m_osEvent.Wait(XN_WAIT_INFINITE);
			m_osEvent.Reset();

			OniFrame* pFrame = getServices().acquireFrame();
			if (pFrame == NULL) {printf("Didn't get frame...\n"); continue;}

			// Fill frame
			xnOSMemSet(pFrame->data, 0, pFrame->dataSize);

			pFrame->frameIndex = frameId;

			pFrame->videoMode.pixelFormat = ONI_PIXEL_FORMAT_RGB888;
			pFrame->videoMode.resolutionX = DEPTHSENSE_COLOR_RESOLUTION_X;
			pFrame->videoMode.resolutionY = DEPTHSENSE_COLOR_RESOLUTION_Y;
			pFrame->videoMode.fps = 30;

			pFrame->width = DEPTHSENSE_COLOR_RESOLUTION_X;
			pFrame->height = DEPTHSENSE_COLOR_RESOLUTION_Y;

			xnOSMemCopy( pFrame->data, &m_data[0], m_data.size() );

			pFrame->cropOriginX = pFrame->cropOriginY = 0;
			pFrame->croppingEnabled = FALSE;

			pFrame->sensorType = ONI_SENSOR_COLOR;
			pFrame->stride = DEPTHSENSE_COLOR_RESOLUTION_X*sizeof(OniRGB888Pixel); // 3 bytes per RGB888 pixel
			pFrame->timestamp = frameId * 33000;

			raiseNewFrame(pFrame);
			getServices().releaseFrame(pFrame);

			frameId++;
		}
	}
Example #7
	void Mainloop()
	{
		int frameId = 1;
		int xdir = -3;
		int ydir = 1;
		struct {int x, y;} center = {160,120};
		while (m_running)
		{
			xnOSSleep(33);
			//			printf("Tick");
			OniFrame* pFrame = getServices().acquireFrame();

			if (pFrame == NULL) {printf("Didn't get frame...\n"); continue;}

			// Fill frame
			xnOSMemSet(pFrame->data, 0, pFrame->dataSize);

			OniRGB888Pixel* pImage = (OniRGB888Pixel*)pFrame->data;


			// Draw a small circular blob of gradient colors around the moving center point
			for (int y = XN_MAX(center.y-10, 0); y < XN_MIN(center.y+10, OZ_RESOLUTION_Y); ++y)
				for (int x = XN_MAX(center.x-10, 0); x < XN_MIN(center.x+10, OZ_RESOLUTION_X); ++x)
					if ((x-center.x)*(x-center.x)+(y-center.y)*(y-center.y) < 70)
					{
						pImage[singleRes(x, y)].r = (unsigned char)(255*(x/(double)OZ_RESOLUTION_X));
						pImage[singleRes(x, y)].g = (unsigned char)(255*(y/(double)OZ_RESOLUTION_Y));
						pImage[singleRes(x, y)].b = (unsigned char)(255*((OZ_RESOLUTION_X-x)/(double)OZ_RESOLUTION_X));
					}
//			pImage[singleRes(center.x, center.y)].r = 255;

			center.x += xdir;
			center.y += ydir;

			if (center.x < abs(xdir) || center.x > OZ_RESOLUTION_X-1-abs(xdir)) xdir*=-1;
			if (center.y < abs(ydir) || center.y > OZ_RESOLUTION_Y-1-abs(ydir)) ydir*=-1;



			pImage[0].b = (unsigned char)255; // fixed blue marker in the top-left pixel

			// 			for (int y = 0; y < OZ_RESOLUTION_Y; ++y)
			// 			{
			// 				pDepth[y*OZ_RESOLUTION_X+(OZ_RESOLUTION_Y-y)] = pDepth[y*OZ_RESOLUTION_X+(y)] = 500+y;
			// 			}

			// Fill metadata
			pFrame->frameIndex = frameId;

			pFrame->videoMode.pixelFormat = ONI_PIXEL_FORMAT_RGB888;
			pFrame->videoMode.resolutionX = OZ_RESOLUTION_X;
			pFrame->videoMode.resolutionY = OZ_RESOLUTION_Y;
			pFrame->videoMode.fps = 30;

			pFrame->width = OZ_RESOLUTION_X;
			pFrame->height = OZ_RESOLUTION_Y;

			pFrame->cropOriginX = pFrame->cropOriginY = 0;
			pFrame->croppingEnabled = FALSE;

			pFrame->sensorType = ONI_SENSOR_COLOR;
			pFrame->stride = OZ_RESOLUTION_X*3;
			pFrame->timestamp = frameId*33000;

			raiseNewFrame(pFrame);
			getServices().releaseFrame(pFrame);

			frameId++;
		}
	}