Example #1
void FreenectGrabber :: run()
{
    setThreadShouldExit(false);
    m_current_image.setCalibration(m_calib_data);
    m_rgbd_image.setCalibration(m_calib_data);

    m_rgbd_image.rawRgbRef() = Mat3b(FREENECT_FRAME_H, FREENECT_FRAME_W);
    m_rgbd_image.rawDepthRef() = Mat1f(FREENECT_FRAME_H, FREENECT_FRAME_W);
    m_rgbd_image.rawIntensityRef() = Mat1f(FREENECT_FRAME_H, FREENECT_FRAME_W);

    m_current_image.rawRgbRef() = Mat3b(FREENECT_FRAME_H, FREENECT_FRAME_W);
    m_current_image.rawDepthRef() = Mat1f(FREENECT_FRAME_H, FREENECT_FRAME_W);
    m_current_image.rawIntensityRef() = Mat1f(FREENECT_FRAME_H, FREENECT_FRAME_W);

    startKinect();
    int64 last_grab_time = 0;

    while (!threadShouldExit())
    {
        waitForNewEvent(-1); // Use infinite timeout in order to honor sync mode.

        while (m_depth_transmitted || m_rgb_transmitted)
            freenect_process_events(f_ctx);

        // m_current_image.rawDepth().copyTo(m_current_image.rawAmplitudeRef());
        // m_current_image.rawDepth().copyTo(m_current_image.rawIntensityRef());

        {
            int64 grab_time = ntk::Time::getMillisecondCounter();
            ntk_dbg_print(grab_time - last_grab_time, 2);
            last_grab_time = grab_time;
            QWriteLocker locker(&m_lock);
            // FIXME: ugly hack to handle the possible time
            // gaps between rgb and IR frames in dual mode.
            if (m_dual_ir_rgb)
                m_current_image.copyTo(m_rgbd_image);
            else
                m_current_image.swap(m_rgbd_image);
            m_rgb_transmitted = true;
            m_depth_transmitted = true;
        }

        if (m_dual_ir_rgb)
            setIRMode(!m_ir_mode);
        advertiseNewFrame();
#ifdef _WIN32
        // FIXME: this is to avoid GUI freezes with libfreenect on Windows.
        // See http://groups.google.com/group/openkinect/t/b1d828d108e9e69
        Sleep(1);
#endif
    }
}
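Note: the grab loop above publishes a finished frame by swapping m_current_image with m_rgbd_image while holding a QWriteLocker, so consumers can read the latest frame under the matching read lock without blocking capture. Below is a minimal sketch of that double-buffer pattern using only Qt locking primitives; the class and member names (SimpleGrabber, Frame, m_latest) are illustrative and not part of nestk.

#include <QReadWriteLock>
#include <QReadLocker>
#include <QWriteLocker>
#include <utility>

struct Frame { /* image buffers would live here */ };

class SimpleGrabber
{
public:
    // Called from the capture thread once a frame is complete.
    void publish(Frame& just_captured)
    {
        QWriteLocker locker(&m_lock);
        std::swap(just_captured, m_latest); // cheap buffer swap, no deep copy
    }

    // Called from any consumer thread.
    Frame latestCopy() const
    {
        QReadLocker locker(&m_lock);
        return m_latest; // copy out while the read lock is held
    }

private:
    mutable QReadWriteLock m_lock;
    Frame m_latest;
};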
Example #2
File: event.cpp  Project: Sergejus/nestk
void AsyncEventListener :: customEvent(QEvent* generic_event)
{
    if (generic_event->type() != QEvent::User)
        return QObject::customEvent(generic_event);

    generic_event->accept();
    m_handler_running = true;
    while (m_event_signaled)
    {
        m_event_signaled = false;
        Event event = waitForNewEvent();
        handleAsyncEvent(event);
        // FIXME: this is important on Windows to avoid the application
        // spending all its time handling these custom events.
        QApplication::processEvents();
    }
    m_handler_running = false;
}
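Note: customEvent() above only runs when a QEvent::User event reaches the listener, so whichever thread produces data has to post such an event after raising the "signaled" flag. Below is a minimal sketch of that producer side using standard Qt calls; the free-function form and the flag parameter are illustrative, and the real class presumably wraps this in a member.

#include <QCoreApplication>
#include <QEvent>
#include <QObject>

// Post a QEvent::User event so the listener's customEvent() runs in the
// listener's own thread. postEvent() is thread-safe and takes ownership
// of the event object.
void notifyListener(QObject* listener, volatile bool& event_signaled)
{
    event_signaled = true;
    QCoreApplication::postEvent(listener, new QEvent(QEvent::User));
}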
Example #3
void OpenniGrabber :: run()
{
    m_should_exit = false;
    m_current_image.setCalibration(m_calib_data);
    m_rgbd_image.setCalibration(m_calib_data);

    // Depth
    m_rgbd_image.rawDepthRef() = Mat1f(m_calib_data->raw_depth_size);
    m_rgbd_image.rawDepthRef() = 0.f;
    m_rgbd_image.depthRef() = m_rgbd_image.rawDepthRef();
    m_current_image.rawDepthRef() = Mat1f(m_calib_data->raw_depth_size);
    m_current_image.rawDepthRef() = 0.f;
    m_current_image.depthRef() = m_current_image.rawDepthRef();

    // Color
    if (m_has_rgb)
    {
        m_rgbd_image.rawRgbRef() = Mat3b(m_calib_data->rawRgbSize());
        m_rgbd_image.rawRgbRef() = Vec3b(0,0,0);
        m_rgbd_image.rgbRef() = m_rgbd_image.rawRgbRef();
        m_current_image.rawRgbRef() = Mat3b(m_calib_data->rawRgbSize());
        m_current_image.rawRgbRef() = Vec3b(0,0,0);
        m_current_image.rgbRef() = m_current_image.rawRgbRef();

        m_rgbd_image.rawIntensityRef() = Mat1f(m_calib_data->rawRgbSize());
        m_rgbd_image.rawIntensityRef() = 0.f;
        m_rgbd_image.intensityRef() = m_rgbd_image.rawIntensityRef();
        m_current_image.rawIntensityRef() = Mat1f(m_calib_data->rawRgbSize());
        m_current_image.rawIntensityRef() = 0.f;
        m_current_image.intensityRef() = m_current_image.rawIntensityRef();
    }

    // User tracking
    m_rgbd_image.userLabelsRef() = cv::Mat1b(m_calib_data->raw_depth_size);
    m_rgbd_image.userLabelsRef() = 0u;

    if (m_track_users)
        m_rgbd_image.setSkeletonData(new Skeleton());

    m_current_image.userLabelsRef() = cv::Mat1b(m_calib_data->raw_depth_size);
    m_current_image.userLabelsRef() = 0u;

    if (m_track_users)
        m_current_image.setSkeletonData(new Skeleton());

    if (m_has_rgb)
    {
        bool mapping_required = m_calib_data->rawRgbSize() != m_calib_data->raw_depth_size;
        if (!mapping_required)
        {
            m_rgbd_image.mappedRgbRef() = m_rgbd_image.rawRgbRef();
            m_rgbd_image.mappedDepthRef() = m_rgbd_image.rawDepthRef();
            m_current_image.mappedRgbRef() = m_current_image.rawRgbRef();
            m_current_image.mappedDepthRef() = m_current_image.rawDepthRef();
        }
        else
        {
            m_rgbd_image.mappedRgbRef() = Mat3b(m_calib_data->raw_depth_size);
            m_rgbd_image.mappedRgbRef() = Vec3b(0,0,0);
            m_rgbd_image.mappedDepthRef() = Mat1f(m_calib_data->rawRgbSize());
            m_rgbd_image.mappedDepthRef() = 0.f;
            m_current_image.mappedRgbRef() = Mat3b(m_calib_data->rawDepthSize());
            m_current_image.mappedRgbRef() = Vec3b(0,0,0);
            m_current_image.mappedDepthRef() = Mat1f(m_calib_data->rawRgbSize());
            m_current_image.mappedDepthRef() = 0.f;
        }
    }

    m_rgbd_image.setCameraSerial(cameraSerial());
    m_current_image.setCameraSerial(cameraSerial());

    xn::SceneMetaData sceneMD;
    xn::DepthMetaData depthMD;
    xn::ImageMetaData rgbMD;
    xn::IRMetaData irMD;

    ImageBayerGRBG bayer_decoder(ImageBayerGRBG::EdgeAware);

    RGBDImage oversampled_image;
    if (m_subsampling_factor != 1)
    {
        oversampled_image.rawDepthRef().create(m_calib_data->rawDepthSize()*m_subsampling_factor);
        oversampled_image.userLabelsRef().create(oversampled_image.rawDepth().size());
    }

    while (!m_should_exit)
    {
        waitForNewEvent();
        ntk_dbg(2) << format("[%x] running iteration", this);

        {
            // OpenNI calls do not seem to be thread safe.
            QMutexLocker ni_locker(&m_ni_mutex);
            waitAndUpdateActiveGenerators();
        }

        if (m_track_users && m_body_event_detector)
            m_body_event_detector->update();

        m_ni_depth_generator.GetMetaData(depthMD);
        if (m_has_rgb)
        {
            if (m_get_infrared)
            {
                m_ni_ir_generator.GetMetaData(irMD);
            }
            else
            {
                m_ni_rgb_generator.GetMetaData(rgbMD);
            }
        }

        RGBDImage& temp_image =
                m_subsampling_factor == 1 ? m_current_image : oversampled_image;

        const XnDepthPixel* pDepth = depthMD.Data();
        ntk_assert((depthMD.XRes() == temp_image.rawDepth().cols)
                   && (depthMD.YRes() == temp_image.rawDepth().rows),
                   "Invalid image size.");

        // Convert to meters.
        const float depth_correction_factor = 1.0;
        float* raw_depth_ptr = temp_image.rawDepthRef().ptr<float>();
        for (int i = 0; i < depthMD.XRes()*depthMD.YRes(); ++i)
            raw_depth_ptr[i] = depth_correction_factor * pDepth[i]/1000.f;

        if (m_has_rgb)
        {
            if (m_get_infrared)
            {
                const XnGrayscale16Pixel* pImage = irMD.Data();
                m_current_image.rawIntensityRef().create(irMD.YRes(), irMD.XRes());
                float* raw_img_ptr = m_current_image.rawIntensityRef().ptr<float>();
                for (int i = 0; i < irMD.XRes()*irMD.YRes(); ++i)
                {
                    raw_img_ptr[i] = pImage[i];
                }
            }
            else
            {
                if (m_custom_bayer_decoding)
                {
                    uchar* raw_rgb_ptr = m_current_image.rawRgbRef().ptr<uchar>();
                    bayer_decoder.fillRGB(rgbMD,
                                          m_current_image.rawRgb().cols, m_current_image.rawRgb().rows,
                                          raw_rgb_ptr);
                    cvtColor(m_current_image.rawRgbRef(), m_current_image.rawRgbRef(), CV_RGB2BGR);
                }
                else
                {
                    const XnUInt8* pImage = rgbMD.Data();
                    ntk_assert(rgbMD.PixelFormat() == XN_PIXEL_FORMAT_RGB24, "Invalid RGB format.");
                    uchar* raw_rgb_ptr = m_current_image.rawRgbRef().ptr<uchar>();
                    // OpenNI delivers RGB24; swap R and B so the image is stored as BGR for OpenCV.
                    for (int i = 0; i < rgbMD.XRes()*rgbMD.YRes()*3; i += 3)
                        for (int k = 0; k < 3; ++k)
                        {
                            raw_rgb_ptr[i+k] = pImage[i+(2-k)];
                        }
                }
            }
        }

        if (m_track_users)
        {
            m_ni_user_generator.GetUserPixels(0, sceneMD);
            uchar* user_mask_ptr = temp_image.userLabelsRef().ptr<uchar>();
            const XnLabel* pLabel = sceneMD.Data();
            for (int i = 0; i < sceneMD.XRes()*sceneMD.YRes(); ++i)
            {
                user_mask_ptr[i] = pLabel[i];
            }

            XnUserID user_ids[15];
            XnUInt16 num_users = 15;
            m_ni_user_generator.GetUsers(user_ids, num_users);

            // FIXME: only one user supported.
            for (int i = 0; i < num_users; ++i)
            {
                XnUserID user_id = user_ids[i];
                if (m_ni_user_generator.GetSkeletonCap().IsTracking(user_id))
                {
                    m_current_image.skeletonRef()->computeJoints(user_id, m_ni_user_generator, m_ni_depth_generator);
                    break;
                }
            }
        }

        if (m_subsampling_factor != 1)
        {
            // Cannot use interpolation here, since this would
            // spread the invalid depth values.
            cv::resize(oversampled_image.rawDepth(),
                       m_current_image.rawDepthRef(),
                       m_current_image.rawDepth().size(),
                       0, 0, INTER_NEAREST);
            // we have to repeat this, since resize can change the pointer.
            // m_current_image.depthRef() = m_current_image.rawDepthRef();
            cv::resize(oversampled_image.userLabels(),
                       m_current_image.userLabelsRef(),
                       m_current_image.userLabels().size(),
                       0, 0, INTER_NEAREST);
        }

        m_current_image.setTimestamp(getCurrentTimestamp());

        {
            QWriteLocker locker(&m_lock);
            m_current_image.swap(m_rgbd_image);
        }

        advertiseNewFrame();
    }
    ntk_dbg(1) << format("[%x] finishing", this);
}
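Note: the subsampling branch above resizes depth and user labels with INTER_NEAREST because, as the comment says, interpolation would spread invalid (zero) depth readings into valid neighbours. Below is a standalone sketch of the same idea with OpenCV; the function name and the factor parameter are illustrative.

#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>

// Downsample a depth map without blending "no reading" zeros into valid
// samples: nearest-neighbour keeps each output pixel equal to exactly one
// input pixel instead of an average.
cv::Mat1f downsampleDepth(const cv::Mat1f& raw_depth, int factor)
{
    cv::Mat1f small_depth(raw_depth.rows / factor, raw_depth.cols / factor);
    cv::resize(raw_depth, small_depth, small_depth.size(),
               0, 0, cv::INTER_NEAREST);
    return small_depth;
}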
Example #4
void NiteRGBDGrabber :: run()
{
  m_should_exit = false;
  m_current_image.setCalibration(m_calib_data);
  m_rgbd_image.setCalibration(m_calib_data);

  m_rgbd_image.rawRgbRef() = Mat3b(m_calib_data->rawRgbSize());
  m_rgbd_image.rawDepthRef() = Mat1f(m_calib_data->raw_depth_size);
  m_rgbd_image.rawIntensityRef() = Mat1f(m_calib_data->rawRgbSize());

  m_rgbd_image.rawIntensityRef() = 0.f;
  m_rgbd_image.rawDepthRef() = 0.f;
  m_rgbd_image.rawRgbRef() = Vec3b(0,0,0);

  m_rgbd_image.rgbRef() = m_rgbd_image.rawRgbRef();
  m_rgbd_image.depthRef() = m_rgbd_image.rawDepthRef();
  m_rgbd_image.intensityRef() = m_rgbd_image.rawIntensityRef();

  m_rgbd_image.userLabelsRef() = cv::Mat1b(m_calib_data->raw_depth_size);
  m_rgbd_image.userLabelsRef() = 0u;

  m_rgbd_image.setSkeletonData(new Skeleton());

  m_current_image.rawRgbRef() = Mat3b(m_calib_data->rawRgbSize());
  m_current_image.rawRgbRef() = Vec3b(0,0,0);
  m_current_image.rawDepthRef() = Mat1f(m_calib_data->raw_depth_size);
  m_current_image.rawDepthRef() = 0.f;
  m_current_image.rawIntensityRef() = Mat1f(m_calib_data->rawRgbSize());
  m_current_image.rawIntensityRef() = 0.f;

  m_current_image.rgbRef() = m_current_image.rawRgbRef();
  m_current_image.depthRef() = m_current_image.rawDepthRef();
  m_current_image.intensityRef() = m_current_image.rawIntensityRef();

  m_current_image.userLabelsRef() = cv::Mat1b(m_calib_data->raw_depth_size);
  m_current_image.userLabelsRef() = 0u;

  m_current_image.setSkeletonData(new Skeleton());

  bool mapping_required = m_calib_data->rawRgbSize() != m_calib_data->raw_depth_size;
  if (!mapping_required)
  {
    m_rgbd_image.mappedRgbRef() = m_rgbd_image.rawRgbRef();
    m_rgbd_image.mappedDepthRef() = m_rgbd_image.rawDepthRef();
    m_current_image.mappedRgbRef() = m_current_image.rawRgbRef();
    m_current_image.mappedDepthRef() = m_current_image.rawDepthRef();
  }
  else
  {
    m_rgbd_image.mappedRgbRef() = Mat3b(m_calib_data->raw_depth_size);
    m_rgbd_image.mappedRgbRef() = Vec3b(0,0,0);
    m_rgbd_image.mappedDepthRef() = Mat1f(m_calib_data->rawRgbSize());
    m_rgbd_image.mappedDepthRef() = 0.f;
    m_current_image.mappedRgbRef() = Mat3b(m_calib_data->raw_depth_size);
    m_current_image.mappedRgbRef() = Vec3b(0,0,0);
    m_current_image.mappedDepthRef() = Mat1f(m_calib_data->rawRgbSize());
    m_current_image.mappedDepthRef() = 0.f;
  }

  xn::SceneMetaData sceneMD;
  xn::DepthMetaData depthMD;
  xn::ImageMetaData rgbMD;

  ImageBayerGRBG bayer_decoder(ImageBayerGRBG::EdgeAware);

  while (!m_should_exit)
  {
    waitForNewEvent();
    m_ni_context.WaitAndUpdateAll();
    if (m_body_event_detector)
      m_body_event_detector->update();
    m_ni_depth_generator.GetMetaData(depthMD);
    m_ni_rgb_generator.GetMetaData(rgbMD);
    if (enable_skeleton_tracking)
      m_ni_user_generator.GetUserPixels(0, sceneMD);

    const XnDepthPixel* pDepth = depthMD.Data();
    ntk_assert((depthMD.XRes() == m_current_image.rawDepth().cols)
               && (depthMD.YRes() == m_current_image.rawDepth().rows),
               "Invalid image size.");

    // Convert to meters.
    float* raw_depth_ptr = m_current_image.rawDepthRef().ptr<float>();
    for (int i = 0; i < depthMD.XRes()*depthMD.YRes(); ++i)
      raw_depth_ptr[i] = pDepth[i]/1000.f;

    if (m_custom_bayer_decoding)
    {
      uchar* raw_rgb_ptr = m_current_image.rawRgbRef().ptr<uchar>();
      bayer_decoder.fillRGB(rgbMD,
                            m_current_image.rawRgb().cols, m_current_image.rawRgb().rows,
                            raw_rgb_ptr);
      cvtColor(m_current_image.rawRgbRef(), m_current_image.rawRgbRef(), CV_RGB2BGR);
    }
    else
    {
      const XnUInt8* pImage = rgbMD.Data();
      ntk_assert(rgbMD.PixelFormat() == XN_PIXEL_FORMAT_RGB24, "Invalid RGB format.");
      uchar* raw_rgb_ptr = m_current_image.rawRgbRef().ptr<uchar>();
      for (int i = 0; i < rgbMD.XRes()*rgbMD.YRes()*3; i += 3)
        for (int k = 0; k < 3; ++k)
        {
          raw_rgb_ptr[i+k] = pImage[i+(2-k)];
        }
    }

    if (enable_skeleton_tracking)
    {
      uchar* user_mask_ptr = m_current_image.userLabelsRef().ptr<uchar>();
      const XnLabel* pLabel = sceneMD.Data();
      for (int i = 0; i < sceneMD.XRes()*sceneMD.YRes(); ++i)
      {
        user_mask_ptr[i] = pLabel[i];
      }

      XnUserID user_ids[15];
      XnUInt16 num_users = 15;
      m_ni_user_generator.GetUsers(user_ids, num_users);

      // FIXME: only one user supported.
      for (int i = 0; i < num_users; ++i)
      {
        XnUserID user_id = user_ids[i];
        if (m_ni_user_generator.GetSkeletonCap().IsTracking(user_id))
        {
          m_current_image.skeletonRef()->computeJoints(user_id, m_ni_user_generator, m_ni_depth_generator);
          break;
        }
      }
    }

    {
      QWriteLocker locker(&m_lock);
      m_current_image.swap(m_rgbd_image);
    }

    advertiseNewFrame();
  }
}
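Note: in both OpenNI-based examples the raw color buffer is copied pixel by pixel while swapping the R and B channels, since OpenNI delivers RGB24 but the images are stored as BGR for OpenCV. The same conversion can be expressed by wrapping the source buffer in a Mat header and calling cvtColor; this is only a sketch of an alternative, not how nestk does it, and the function name and parameters are illustrative.

#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>

// Wrap an RGB24 buffer (no copy) and convert it to a BGR cv::Mat3b.
void rgb24ToBgr(const unsigned char* rgb24, int width, int height, cv::Mat3b& bgr_out)
{
    const cv::Mat3b rgb_view(height, width,
                             const_cast<cv::Vec3b*>(
                                 reinterpret_cast<const cv::Vec3b*>(rgb24)));
    bgr_out.create(height, width);
    cv::cvtColor(rgb_view, bgr_out, CV_RGB2BGR); // single R<->B swap pass
}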