Exemplo n.º 1
0
void FreenectGrabber :: rgbCallBack(uint8_t *buf, int width, int height)
{
    // Copy the packed RGB frame, then swap channels to OpenCV's BGR order
    // and flag the frame as not yet transmitted to consumers.
    ntk_assert(width == m_current_image.rawRgb().cols, "Bad width");
    ntk_assert(height == m_current_image.rawRgb().rows, "Bad height");
    const size_t n_bytes = static_cast<size_t>(3) * width * height;
    uint8_t* dst = (uint8_t*)m_current_image.rawRgbRef().ptr();
    std::copy(buf, buf + n_bytes, dst);
    cvtColor(m_current_image.rawRgb(), m_current_image.rawRgbRef(), CV_RGB2BGR);
    m_rgb_transmitted = false;
}
Exemplo n.º 2
0
void FreenectGrabber :: irCallBack(uint8_t *buf, int width, int height)
{
    // Copy the 8-bit IR frame into the float intensity buffer.
    ntk_assert(width == m_current_image.rawIntensity().cols, "Bad width");
    ntk_assert(height == m_current_image.rawIntensity().rows, "Bad height");
    const int n_pixels = width * height;
    float* dst = m_current_image.rawIntensityRef().ptr<float>();
    std::copy(buf, buf + n_pixels, dst); // element-wise uint8 -> float widening
    // NOTE(review): IR frames appear to reuse the RGB transmission flag
    // (there is no dedicated IR flag here) — confirm against the consumer.
    m_rgb_transmitted = false;
}
Exemplo n.º 3
0
void FreenectGrabber :: depthCallBack(uint16_t *buf, int width, int height)
{
    // Copy the 16-bit depth frame into the float depth buffer.
    ntk_assert(width == m_current_image.rawDepth().cols, "Bad width");
    ntk_assert(height == m_current_image.rawDepth().rows, "Bad height");
    const int n_pixels = width * height;
    float* dst = m_current_image.rawDepthRef().ptr<float>();
    std::copy(buf, buf + n_pixels, dst); // element-wise uint16 -> float widening
    // NOTE(review): values are stored as raw sensor units here; any
    // conversion to meters presumably happens downstream — confirm.
    m_depth_transmitted = false;
}
Exemplo n.º 4
0
 // Appends to *maxima every local maximum of `histogram` as a (index, value)
 // point. The two endpoints are always reported as maxima. Plateaus count:
 // a bin equal to both neighbors is kept. Requires histogram.size() >= 2.
 void keep_local_maxima(std::vector<cv::Point2f>* maxima, const std::vector<double>& histogram)
 {
   ntk_assert(maxima != 0, "Maxima is null.");
   ntk_assert(histogram.size() >= 2, "Histogram too small.");
   maxima->push_back(cv::Point2f(0, histogram[0]));
   double prev_value = histogram[0];
   for (size_t i = 1; i < histogram.size()-1; ++i)
   {
     // i+1 <= histogram.size()-1 inside this loop, so the access is safe.
     // (The original guarded it with a ternary whose condition was always
     // true — dead code removed.)
     const double next_value = histogram[i+1];
     if (histogram[i] >= prev_value && histogram[i] >= next_value)
       maxima->push_back(cv::Point2f(i, histogram[i]));
     prev_value = histogram[i];
   }
   maxima->push_back(cv::Point2f(histogram.size()-1, histogram.back()));
 }
Exemplo n.º 5
0
 // Maps an arbitrary angle (radians) into the range [0, 2*pi).
 double normalize_angle_0_2pi(double angle)
 {
   const double two_pi = 2.0*M_PI;
   double result = fmod(angle, two_pi); // result now in (-2pi, 2pi)
   if (result < 0)
     result += two_pi;                  // shift negatives into range
   ntk_assert(ntk::in_range(0.0, result, two_pi), "Angle not in [0, 2pi]");
   return result;
 }
Exemplo n.º 6
0
  // Finds parameters b, n in the confidence region induced by confidence
  // which maximize wblinv(c, b, n). Returns best_params unchanged when the
  // distribution is too small or the optimizer drifts too far (>80%) from
  // the starting point.
  WeibullEstimates
  estimate_weibull_upper_bounds(std::map<double,double> distribution,
                                const WeibullEstimates& best_params,
                                double confidence,
                                double c)
  {
    // Weibull support is x > 0: fold any mass at 0 into a tiny positive bin.
    if (has_key(distribution, 0))
      distribution[0.0001] += distribution[0];
    distribution.erase(0);
    // Too few distinct bins: keep the initial estimates.
    if (distribution.size() < 5)
      return best_params;
    weibull_upper_bounds_function f(distribution, best_params, confidence, c);
    // vnl_lbfgs minimizer(f);
    vnl_powell minimizer(&f);
    // vnl_conjugate_gradient minimizer(f);
    vnl_vector<double> estimate(2);
    estimate[0] = best_params.beta;
    estimate[1] = best_params.etha;
    // BUGFIX: run the minimization unconditionally. The original wrapped
    // this call inside ntk_assert, so it would be skipped entirely in
    // builds where the assert macro compiles to nothing.
    const bool minimize_ok = minimizer.minimize(estimate);
    ntk_assert(minimize_ok, "");
    (void)minimize_ok; // silence unused warning when asserts are disabled
    // Reject estimates that moved more than 80% away from the start point.
    if (std::abs(estimate[0]-best_params.beta) > (best_params.beta*0.8)
        || std::abs(estimate[1]-best_params.etha) > (best_params.etha*0.8))
    {
      std::cerr << "Warning: algorithm did not converge." << std::endl;
      return best_params;
    }
    WeibullEstimates estimates;
    estimates.beta = estimate[0];
    estimates.etha = estimate[1];
    estimates.gamma = best_params.gamma;
    estimates.logl = 0;
    return estimates;
  }
Exemplo n.º 7
0
 // Estimates the third Weibull parameter (gamma) for the given discrete
 // distribution by running an amoeba (Nelder-Mead) search on the
 // 3-parameter likelihood, starting from e.gamma.
 void estimate_weibull3(std::map<double,double> distribution, 
                        WeibullEstimates& e)
 {
   // Weibull support is x > 0: fold any mass at 0 into a tiny positive bin.
   if (has_key(distribution, 0))
     distribution[0.0001] += distribution[0];
   distribution.erase(0);
   weibull3_likelihood_function f(distribution, e);
   // vnl_lbfgs minimizer(f);
   // vnl_powell minimizer(&f);
   vnl_amoeba minimizer(f);
   vnl_vector<double> estimate(1);
   estimate[0] = e.gamma;
   minimizer.minimize(estimate);
   // NOTE(review): the optimized gamma is only asserted to equal the input
   // and is never written back into e — presumably the likelihood functor
   // updates e internally, or gamma is expected to stay fixed; confirm
   // against the definition of weibull3_likelihood_function.
   ntk_assert(flt_eq(e.gamma, estimate[0]), "");
 }
Exemplo n.º 8
0
  void ObjectMatch :: loadFromXmlElement(const XMLNode& element, XmlContext* context)
  {
    const ObjectDetector* finder = dynamic_cast<const ObjectDetector*>(context);    
    ntk_assert(finder != 0, "Bad context.");
    m_finder_data = &finder->data();
    std::string model_name;
    loadFromXmlAttribute(element, "model", model_name);
    const VisualObject* obj = finder->objectDatabase().visualObject(model_name);
    if (obj == 0) ntk_throw_exception("Unknown model: " + model_name);

    int view_id = 0;
    loadFromXmlAttribute(element, "view", view_id);
    
    ObjectPosePtr p (new ObjectPose(*m_finder_data, obj->views()[view_id], ntk::AffineTransform::identity));
    loadFromXmlChild(element, "pose", *p);
    m_pose = p;
  }
Exemplo n.º 9
0
 // Cost functor used to search the Weibull confidence region (2 free
 // parameters: beta and etha; gamma is held fixed at estimates.gamma).
 // Precomputes the total sample count and the log-likelihood of the
 // initial estimates as the comparison baseline.
 weibull_upper_bounds_function(const distrib_type& distrib,
                               WeibullEstimates estimates,
                               double confidence,
                               double c)
 	: vnl_cost_function(2), // 2 parameters: beta and etha
     distrib(distrib),
     estimates(estimates),
     confidence(confidence),
     c(c),
     gamma(estimates.gamma)
 {
   // Total number of samples = sum of the per-bin counts.
   nbelem = 0;
   foreach_const_it(it, distrib, distrib_type)
     nbelem += it->second;
   // Baseline log-likelihood of the starting model.
   WeibullDistrib m (estimates.beta, estimates.etha, estimates.gamma);
   logl_estimates = model_log_likelihood(distrib, nbelem, m);
   // NOTE(review): only confidence values ~0.99995 are accepted here —
   // confirm this matches every caller.
   ntk_assert(confidence <= 0.99996 && confidence >= 0.99994, "");
 }
Exemplo n.º 10
0
  // Sets up an offscreen GL rendering target of the given size: CPU-side
  // depth/color buffers, a Qt pixel buffer with its GL context, GLEW, and
  // the fixed GL state used by renderToImage().
  MeshRenderer :: MeshRenderer(int image_width, int image_height, float transparency)
    : m_mesh(0), m_transparency(transparency)
  {
    // CPU-side mirrors of the GL framebuffer, filled in renderToImage().
    m_depth_buffer = cv::Mat1f(Size(image_width, image_height));
    m_color_buffer = cv::Mat4b(Size(image_width, image_height));
    m_context = new QGLContext(QGLFormat(QGL::SampleBuffers|QGL::DepthBuffer|QGL::AlphaChannel));
    m_pbuffer = new QGLPixelBuffer(QSize(image_width, image_height), m_context->format());
    m_pbuffer->makeCurrent();

    GLenum err = glewInit();
    ntk_ensure(err == GLEW_OK, "Could not load GLEW.");
    std::cout << "Status: Using GLEW " << glewGetString(GLEW_VERSION) << std::endl;
    ntk_assert(GLEW_ARB_vertex_buffer_object, "Cannot use vertex buffer.");

    // BUGFIX: removed glEnable(GL_DEPTH) — GL_DEPTH is not a valid
    // glEnable capability and only raised GL_INVALID_ENUM. Depth testing
    // is what GL_DEPTH_TEST below enables.
    glEnable(GL_DEPTH_TEST);
    glEnable(GL_CULL_FACE);

    // Clear to red with zero alpha; renderToImage() later uses the alpha
    // channel to detect which pixels the mesh actually covered.
    glClearColor(1.0f, 0.0f, 0.0f, 0.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
  }
Exemplo n.º 11
0
  // Expands one SIFT point match into Hough votes: for each dimension
  // (orientation, log2 scale, x, y) the two closest bins are voted for,
  // giving 2*2*2*2 = 16 HoughPoints in most cases, appended to `points`.
  void SiftHough :: houghpointsFromMatch(std::list<HoughPoint>& points, SiftPointMatchConstPtr match)
  {
    ntk_assert(match, "Null pointer.");
    const SiftPointTransform& point_transform = match->pointTransform();

    // Per-dimension pair of candidate bins: the containing bin plus the
    // neighbor on the closest side.
    std::vector<unsigned char> orientation_bins(2);
    std::vector<int> scale_bins(2);
    std::vector<int> x_bins(2);
    std::vector<int> y_bins(2);

    // orientation bins (wrap around at 0 / max_bins)
    {
      unsigned max_bins = (unsigned) (std::floor((2.0*M_PI)/m_params.delta_orientation)-1);
      // NOTE(review): bins are stored as unsigned char, which silently
      // truncates if delta_orientation is small enough that max_bins > 255
      // — confirm the parameter range.
      double o = point_transform.orientation / m_params.delta_orientation;
      orientation_bins[0] = (unsigned) std::floor(o);
      if ((o-orientation_bins[0]) < 0.5)
      {
        if (orientation_bins[0] == 0) orientation_bins[1] = max_bins;
        else orientation_bins[1] = orientation_bins[0] - 1;
      }
      else
      {
        if (orientation_bins[0] == max_bins) orientation_bins[1] = 0;
        else orientation_bins[1] = orientation_bins[0] + 1;
      }
      ntk_dbg_print(point_transform.orientation, 2);
      ntk_dbg_print(o, 2);
      ntk_dbg_print(orientation_bins[0], 2);
      ntk_dbg_print(orientation_bins[1], 2);
    }

    // scale bins: main log2 bin plus the closest neighbor
    {
      double s = log(point_transform.scale)/log(2.0);
      scale_bins[0] = (int)floor(s);
      int s2;
      if (s > scale_bins[0]+0.5) s2 = scale_bins[0] + 1;
      else s2 = scale_bins[0] - 1;
      scale_bins[1] = s2;
    }

    const VisualObjectView& model = match->pose()->modelView();
    cv::Rect_<float> model_bbox = model.boundingRect();
    const ObjectPose& model_pose = *match->pose();

    // Vote for 16 bins in most cases.
    for (unsigned s = 0; s < scale_bins.size(); ++s)
    for (unsigned o = 0; o < orientation_bins.size(); ++o)
    {
      HoughPoint p;
      p.object_id = model.id();
      p.orientation = orientation_bins[o];
      p.scale = scale_bins[s];

      // Location bin size scales with the candidate scale.
      double actual_scale = pow(2.0, p.scale);
      ntk_dbg_print(actual_scale, 2);
      // double max_dim = (model_bbox.width()*actual_scale + model_bbox.height()*actual_scale) / 2.0;
      double max_dim = std::max(model_bbox.width*actual_scale, model_bbox.height*actual_scale);
      max_dim *= m_params.delta_location;
      double x = model_pose.projectedCenter().x / max_dim;
      double y = model_pose.projectedCenter().y / max_dim;
      x_bins[0] = (int)floor(x);
      x_bins[1] = x_bins[0] + ((x-x_bins[0]) < 0.5 ? -1 : 1);
      y_bins[0] = (int)floor(y);
      y_bins[1] = y_bins[0] + ((y-y_bins[0]) < 0.5 ? -1 : 1);
      // BUGFIX(readability): the inner loop indices were named x/y,
      // shadowing the double x/y computed just above; renamed to xi/yi.
      for (unsigned yi = 0; yi < y_bins.size(); ++yi)
      for (unsigned xi = 0; xi < x_bins.size(); ++xi)
      {
        p.x = x_bins[xi];
        p.y = y_bins[yi];
        points.push_back(p);
      }
    }
  }
Exemplo n.º 12
0
// Acquisition thread body. Allocates every image buffer once up front,
// then loops: wait for a frame event, pull depth / color-or-IR / user
// data from the OpenNI generators into m_current_image, and swap it into
// m_rgbd_image under the write lock for consumers.
void OpenniGrabber :: run()
{
    m_should_exit = false;
    m_current_image.setCalibration(m_calib_data);
    m_rgbd_image.setCalibration(m_calib_data);

    // Depth
    m_rgbd_image.rawDepthRef() = Mat1f(m_calib_data->raw_depth_size);
    m_rgbd_image.rawDepthRef() = 0.f;
    m_rgbd_image.depthRef() = m_rgbd_image.rawDepthRef();
    m_current_image.rawDepthRef() = Mat1f(m_calib_data->raw_depth_size);
    m_current_image.rawDepthRef() = 0.f;
    m_current_image.depthRef() = m_current_image.rawDepthRef();

    // Color
    if (m_has_rgb)
    {
        m_rgbd_image.rawRgbRef() = Mat3b(m_calib_data->rawRgbSize());
        m_rgbd_image.rawRgbRef() = Vec3b(0,0,0);
        m_rgbd_image.rgbRef() = m_rgbd_image.rawRgbRef();
        m_current_image.rawRgbRef() = Mat3b(m_calib_data->rawRgbSize());
        m_current_image.rawRgbRef() = Vec3b(0,0,0);
        m_current_image.rgbRef() = m_current_image.rawRgbRef();

        m_rgbd_image.rawIntensityRef() = Mat1f(m_calib_data->rawRgbSize());
        m_rgbd_image.rawIntensityRef() = 0.f;
        m_rgbd_image.intensityRef() = m_rgbd_image.rawIntensityRef();
        m_current_image.rawIntensityRef() = Mat1f(m_calib_data->rawRgbSize());
        m_current_image.rawIntensityRef() = 0.f;
        m_current_image.intensityRef() = m_current_image.rawIntensityRef();
    }

    // User tracking
    m_rgbd_image.userLabelsRef() = cv::Mat1b(m_calib_data->raw_depth_size);
    m_rgbd_image.userLabelsRef() = 0u;

    if (m_track_users)
        m_rgbd_image.setSkeletonData(new Skeleton());

    m_current_image.userLabelsRef() = cv::Mat1b(m_calib_data->raw_depth_size);
    m_current_image.userLabelsRef() = 0u;

    if (m_track_users)
        m_current_image.setSkeletonData(new Skeleton());

    if (m_has_rgb)
    {
        // When RGB and depth resolutions are identical, the mapped views
        // can simply alias the raw buffers; otherwise allocate separate
        // mapped buffers (mapped RGB in depth geometry, mapped depth in
        // RGB geometry).
        bool mapping_required = m_calib_data->rawRgbSize() != m_calib_data->raw_depth_size;
        if (!mapping_required)
        {
            m_rgbd_image.mappedRgbRef() = m_rgbd_image.rawRgbRef();
            m_rgbd_image.mappedDepthRef() = m_rgbd_image.rawDepthRef();
            m_current_image.mappedRgbRef() = m_current_image.rawRgbRef();
            m_current_image.mappedDepthRef() = m_current_image.rawDepthRef();
        }
        else
        {
            // NOTE(review): raw_depth_size (member) vs rawDepthSize()
            // (accessor) are mixed below — presumably equivalent; confirm.
            m_rgbd_image.mappedRgbRef() = Mat3b(m_calib_data->raw_depth_size);
            m_rgbd_image.mappedRgbRef() = Vec3b(0,0,0);
            m_rgbd_image.mappedDepthRef() = Mat1f(m_calib_data->rawRgbSize());
            m_rgbd_image.mappedDepthRef() = 0.f;
            m_current_image.mappedRgbRef() = Mat3b(m_calib_data->rawDepthSize());
            m_current_image.mappedRgbRef() = Vec3b(0,0,0);
            m_current_image.mappedDepthRef() = Mat1f(m_calib_data->rawRgbSize());
            m_current_image.mappedDepthRef() = 0.f;
        }
    }

    m_rgbd_image.setCameraSerial(cameraSerial());
    m_current_image.setCameraSerial(cameraSerial());

    xn::SceneMetaData sceneMD;
    xn::DepthMetaData depthMD;
    xn::ImageMetaData rgbMD;
    xn::IRMetaData irMD;

    ImageBayerGRBG bayer_decoder(ImageBayerGRBG::EdgeAware);

    // When subsampling, frames are captured at full resolution into this
    // scratch image and downscaled into m_current_image at the end.
    RGBDImage oversampled_image;
    if (m_subsampling_factor != 1)
    {
        oversampled_image.rawDepthRef().create(m_calib_data->rawDepthSize()*m_subsampling_factor);
        oversampled_image.userLabelsRef().create(oversampled_image.rawDepth().size());
    }

    while (!m_should_exit)
    {
        waitForNewEvent();
        ntk_dbg(2) << format("[%x] running iteration", this);

        {
            // OpenNI calls do not seem to be thread safe.
            QMutexLocker ni_locker(&m_ni_mutex);
            waitAndUpdateActiveGenerators();
        }

        if (m_track_users && m_body_event_detector)
            m_body_event_detector->update();

        m_ni_depth_generator.GetMetaData(depthMD);
        if (m_has_rgb)
        {
            if (m_get_infrared)
            {
                m_ni_ir_generator.GetMetaData(irMD);
            }
            else
            {
                m_ni_rgb_generator.GetMetaData(rgbMD);
            }
        }

        // Depth (and user labels) land either directly in m_current_image
        // or in the oversampled scratch image when subsampling.
        RGBDImage& temp_image =
                m_subsampling_factor == 1 ? m_current_image : oversampled_image;

        const XnDepthPixel* pDepth = depthMD.Data();
        ntk_assert((depthMD.XRes() == temp_image.rawDepth().cols)
                   && (depthMD.YRes() == temp_image.rawDepth().rows),
                   "Invalid image size.");

        // Convert to meters.
        const float depth_correction_factor = 1.0;
        float* raw_depth_ptr = temp_image.rawDepthRef().ptr<float>();
        for (int i = 0; i < depthMD.XRes()*depthMD.YRes(); ++i)
            raw_depth_ptr[i] = depth_correction_factor * pDepth[i]/1000.f;

        if (m_has_rgb)
        {
            if (m_get_infrared)
            {
                // 16-bit grayscale IR frame widened into the float buffer.
                const XnGrayscale16Pixel* pImage = irMD.Data();
                m_current_image.rawIntensityRef().create(irMD.YRes(), irMD.XRes());
                float* raw_img_ptr = m_current_image.rawIntensityRef().ptr<float>();
                for (int i = 0; i < irMD.XRes()*irMD.YRes(); ++i)
                {
                    raw_img_ptr[i] = pImage[i];
                }
            }
            else
            {
                if (m_custom_bayer_decoding)
                {
                    // Raw Bayer pattern decoded in software, then RGB->BGR.
                    uchar* raw_rgb_ptr = m_current_image.rawRgbRef().ptr<uchar>();
                    bayer_decoder.fillRGB(rgbMD,
                                          m_current_image.rawRgb().cols, m_current_image.rawRgb().rows,
                                          raw_rgb_ptr);
                    cvtColor(m_current_image.rawRgbRef(), m_current_image.rawRgbRef(), CV_RGB2BGR);
                }
                else
                {
                    // Driver-decoded RGB24; swap channel order to BGR inline.
                    const XnUInt8* pImage = rgbMD.Data();
                    ntk_assert(rgbMD.PixelFormat() == XN_PIXEL_FORMAT_RGB24, "Invalid RGB format.");
                    uchar* raw_rgb_ptr = m_current_image.rawRgbRef().ptr<uchar>();
                    for (int i = 0; i < rgbMD.XRes()*rgbMD.YRes()*3; i += 3)
                        for (int k = 0; k < 3; ++k)
                        {
                            raw_rgb_ptr[i+k] = pImage[i+(2-k)];
                        }
                }
            }
        }

        if (m_track_users)
        {
            // Per-pixel user labels, then skeleton joints of the first
            // tracked user.
            m_ni_user_generator.GetUserPixels(0, sceneMD);
            uchar* user_mask_ptr = temp_image.userLabelsRef().ptr<uchar>();
            const XnLabel* pLabel = sceneMD.Data();
            for (int i = 0; i < sceneMD.XRes()*sceneMD.YRes(); ++i)
            {
                user_mask_ptr[i] = pLabel[i];
            }

            XnUserID user_ids[15];
            XnUInt16 num_users = 15;
            m_ni_user_generator.GetUsers(user_ids, num_users);

            // FIXME: only one user supported.
            for (int i = 0; i < num_users; ++i)
            {
                XnUserID user_id = user_ids[i];
                if (m_ni_user_generator.GetSkeletonCap().IsTracking(user_id))
                {
                    m_current_image.skeletonRef()->computeJoints(user_id, m_ni_user_generator, m_ni_depth_generator);
                    break;
                }
            }
        }

        if (m_subsampling_factor != 1)
        {
            // Cannot use interpolation here, since this would
            // spread the invalid depth values.
            cv::resize(oversampled_image.rawDepth(),
                       m_current_image.rawDepthRef(),
                       m_current_image.rawDepth().size(),
                       0, 0, INTER_NEAREST);
            // we have to repeat this, since resize can change the pointer.
            // m_current_image.depthRef() = m_current_image.rawDepthRef();
            cv::resize(oversampled_image.userLabels(),
                       m_current_image.userLabelsRef(),
                       m_current_image.userLabels().size(),
                       0, 0, INTER_NEAREST);
        }

        m_current_image.setTimestamp(getCurrentTimestamp());

        {
            // Publish the frame: swap the working image with the shared one.
            QWriteLocker locker(&m_lock);
            m_current_image.swap(m_rgbd_image);
        }

        advertiseNewFrame();
    }
    ntk_dbg(1) << format("[%x] finishing", this);
}
Exemplo n.º 13
0
// Opens and configures the OpenNI device selected by m_camera_id:
// locates the matching device node, creates depth / RGB / IR (and
// optionally user, hands, gesture) generators restricted to that device,
// starts them, and estimates calibration if none was supplied.
// Returns true on success; errors surface through checkXnError /
// ntk_throw_exception.
bool OpenniGrabber :: connectToDevice()
{
    QMutexLocker ni_locker(&m_ni_mutex);
    ntk_dbg(1) << format("[Kinect %x] connecting", this);

    XnStatus status;
    // Enumerate all device nodes and find the one whose creation info
    // matches our camera id.
    xn::NodeInfoList device_node_info_list;
    status = m_driver.niContext().EnumerateProductionTrees(XN_NODE_TYPE_DEVICE, NULL, device_node_info_list);
    m_driver.checkXnError(status, "Cannot enumerate devices.");

    xn::NodeInfoList::Iterator nodeIt = device_node_info_list.Begin();
    for (; nodeIt != device_node_info_list.End (); ++nodeIt)
    {
        xn::NodeInfo deviceInfo = *nodeIt;
        if (m_driver.deviceInfo(m_camera_id).creation_info == deviceInfo.GetCreationInfo())
            break;
    }
    ntk_throw_exception_if(nodeIt == device_node_info_list.End (), "Cannot find device.");

    setCameraSerial(m_driver.deviceInfo(m_camera_id).serial);

    xn::NodeInfo deviceInfo = *nodeIt;
    ntk_assert(m_driver.deviceInfo(m_camera_id).creation_info == deviceInfo.GetCreationInfo(), "Inconsistent nodes!");
    status = m_driver.niContext().CreateProductionTree(deviceInfo, m_ni_device);
    m_driver.checkXnError(status, "Create Device Node");
    const XnProductionNodeDescription& description = deviceInfo.GetDescription();
    ntk_dbg(1) << format("device %d: vendor %s name %s, instance %s, serial %s",
                         m_camera_id,
                         description.strVendor, description.strName, deviceInfo.GetInstanceName(), m_driver.deviceInfo(m_camera_id).serial.c_str());
    // Restrict every generator created below to this device instance.
    xn::Query query;
    query.AddNeededNode(deviceInfo.GetInstanceName());

    bool is_kinect = (std::string("SensorKinect") == description.strName) || (std::string("SensorV2") == description.strName);

    // Depth generator: fixed VGA @ 30 fps.
    status = m_ni_depth_generator.Create(m_driver.niContext(), &query);
    m_driver.checkXnError(status, "Create depth generator");
    XnMapOutputMode depth_mode;
    depth_mode.nXRes = 640;
    depth_mode.nYRes = 480;
    depth_mode.nFPS = 30;
    m_ni_depth_generator.SetMapOutputMode(depth_mode);

    status = m_driver.niContext().SetGlobalMirror(m_mirrored);
    m_driver.checkXnError(status, "Mirror");

    // RGB is optional: if creation fails, continue in depth-only mode.
    status = m_ni_rgb_generator.Create(m_driver.niContext(), &query);
    if (status != XN_STATUS_OK)
    {
        m_has_rgb = false;
        ntk_dbg(1) << "Warning: no color stream!";
    }

    if (m_has_rgb)
    {
        status = m_ni_rgb_generator.SetIntProperty ("Resolution", 1);
        m_driver.checkXnError(status, "Resolution");

        // High resolution: 1280x1024 @ 15 fps; otherwise VGA @ 30 fps.
        XnMapOutputMode rgb_mode;
        if (m_high_resolution)
        {
            rgb_mode.nFPS = 15;
            rgb_mode.nXRes = 1280;
            rgb_mode.nYRes = 1024;
        }
        else
        {
            rgb_mode.nXRes = 640;
            rgb_mode.nYRes = 480;
            rgb_mode.nFPS = 30;
        }
        status = m_ni_rgb_generator.SetMapOutputMode(rgb_mode);
        m_driver.checkXnError(status, "Set RGB mode");

        // Register the depth stream onto the RGB viewpoint.
        ntk_throw_exception_if(!m_ni_depth_generator.IsCapabilitySupported(XN_CAPABILITY_ALTERNATIVE_VIEW_POINT), "Cannot register images.");
        m_ni_depth_generator.GetAlternativeViewPointCap().SetViewPoint(m_ni_rgb_generator);

        // Hardware depth/RGB frame sync where supported (not on Kinect).
        if (!is_kinect && m_ni_depth_generator.IsCapabilitySupported(XN_CAPABILITY_FRAME_SYNC))
        {
            ntk_dbg(1) << "Frame Sync supported.";
            status = m_ni_depth_generator.GetFrameSyncCap ().FrameSyncWith (m_ni_rgb_generator);
            m_driver.checkXnError(status, "Set Frame Sync");
        }

        // IR generator, used when infrared capture is requested in run().
        status = m_ni_ir_generator.Create(m_driver.niContext(), &query);
        m_driver.checkXnError(status, "Create infrared generator");
        XnMapOutputMode ir_mode;
        ir_mode.nFPS = 15;
        ir_mode.nXRes = 1280;
        ir_mode.nYRes = 1024;
        m_ni_ir_generator.SetMapOutputMode(ir_mode);
    }

    if (m_track_users)
    {
        status = m_ni_user_generator.Create(m_driver.niContext(), &query);
        m_driver.checkXnError(status, "Create user generator");
        status = m_ni_hands_generator.Create(m_driver.niContext(), &query);
        m_driver.checkXnError(status, "Create hands generator");
        status = m_ni_gesture_generator.Create(m_driver.niContext(), &query);
        m_driver.checkXnError(status, "Create gestures generator");
    }

    if (m_track_users)
    {
        ntk_throw_exception_if(!m_ni_user_generator.IsCapabilitySupported(XN_CAPABILITY_SKELETON),
                               "Supplied user generator doesn't support skeleton.");

        // Hook user lifecycle and skeleton calibration callbacks.
        XnCallbackHandle hUserCallbacks, hCalibrationCallbacks, hPoseCallbacks;
        m_ni_user_generator.RegisterUserCallbacks(User_NewUser, User_LostUser, this, hUserCallbacks);
        m_ni_user_generator.GetSkeletonCap().RegisterCalibrationCallbacks(UserCalibration_CalibrationStart, UserCalibration_CalibrationEnd, this, hCalibrationCallbacks);

        // Older skeleton implementations require a calibration pose.
        if (m_ni_user_generator.GetSkeletonCap().NeedPoseForCalibration())
        {
            m_need_pose_to_calibrate = true;
            if (!m_ni_user_generator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION))
                ntk_throw_exception("Pose required, but not supported\n");
            m_ni_user_generator.GetPoseDetectionCap().RegisterToPoseCallbacks(UserPose_PoseDetected, NULL, this, hPoseCallbacks);
            m_ni_user_generator.GetSkeletonCap().GetCalibrationPose(m_calibration_pose);
        }

        m_ni_user_generator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);

        if (m_body_event_detector)
            m_body_event_detector->initialize(m_driver.niContext(), m_ni_depth_generator);
    }

    status = m_ni_depth_generator.StartGenerating();
    m_driver.checkXnError(status, "Depth::StartGenerating");

    if (m_has_rgb)
    {
        status = m_ni_rgb_generator.StartGenerating();
        m_driver.checkXnError(status, "RGB::StartGenerating");

        if (m_custom_bayer_decoding)
        {
            // Grayscale to get raw Bayer pattern.
            status = m_ni_rgb_generator.SetIntProperty ("InputFormat", 6);
            m_driver.checkXnError(status, "Change input format");

            status = m_ni_rgb_generator.SetPixelFormat(XN_PIXEL_FORMAT_GRAYSCALE_8_BIT);
            m_driver.checkXnError(status, "Change pixel format");
        }
    }

    if (m_track_users)
    {
        status = m_ni_user_generator.StartGenerating();
        m_driver.checkXnError(status, "User::StartGenerating");

        status = m_ni_hands_generator.StartGenerating();
        m_driver.checkXnError(status, "Hands::StartGenerating");

        status = m_ni_gesture_generator.StartGenerating();
        m_driver.checkXnError(status, "Gesture::StartGenerating");
    }

    // Pull a first frame so calibration estimation sees valid metadata.
    waitAndUpdateActiveGenerators();
    if (!m_calib_data)
        estimateCalibration();

    m_connected = true;
    return true;
}
Exemplo n.º 14
0
// Renders the prepared vertex buffer object offscreen, refreshes the
// internal color and depth buffers, and alpha-blends the rendered pixels
// over `image`. `flags` may contain WIREFRAME and/or OUTLINE.
// Requires setPose()/setMesh() to have initialized the VBO first.
void MeshRenderer :: renderToImage(cv::Mat4b& image, int flags)
{
    ntk_assert(m_vertex_buffer_object.initialized,
               "Renderer not initialized! Call setPose and setMesh.");

    ntk::TimeCount tc_gl_current("make_current", 2);
    m_pbuffer->makeCurrent();
    tc_gl_current.stop();

    ntk::TimeCount tc_gl_render("gl_render", 2);

    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    if (flags & WIREFRAME)
        glPolygonMode( GL_FRONT_AND_BACK, GL_LINE );

    VertexBufferObject& vbo = m_vertex_buffer_object;
    glBindBufferARB(GL_ARRAY_BUFFER_ARB, vbo.vertex_id);

    // Texturing only when the VBO carries texture coordinates.
    if (vbo.has_texcoords)
    {
        glEnable(GL_TEXTURE_2D);
        glBindTexture(GL_TEXTURE_2D, vbo.texture_id);
    }
    else
    {
        glDisable(GL_TEXTURE_2D);
    }

    if (vbo.has_texcoords)
        glEnableClientState(GL_TEXTURE_COORD_ARRAY);

    if (vbo.has_color)
        glEnableClientState(GL_COLOR_ARRAY);
    else
        glColor3f(1.0f,0.f,0.f); // no per-vertex color: flat red

    glEnableClientState(GL_VERTEX_ARRAY);

    // Attribute arrays are addressed by byte offsets into the bound VBO.
    glVertexPointer(3, GL_FLOAT, 0, 0);
    if (vbo.has_color)
        glColorPointer(3, GL_UNSIGNED_BYTE, 0, ((char*) NULL) + vbo.color_offset);

    if (vbo.has_texcoords)
        glTexCoordPointer(2, GL_FLOAT, 0, ((char*) NULL) + vbo.texture_offset);

    if (vbo.has_faces)
    {
        // Indexed triangle rendering.
        glBindBufferARB(GL_ELEMENT_ARRAY_BUFFER_ARB, vbo.faces_id);
        glNormal3f(0, 0, 1);
        glDrawElements(GL_TRIANGLES, vbo.nb_faces*3, GL_UNSIGNED_INT, 0);

        if (flags & OUTLINE)
        {
            // Draw again for outline.
            glPolygonMode( GL_FRONT_AND_BACK, GL_LINE );
            glDrawElements(GL_TRIANGLES, vbo.nb_faces*3, GL_UNSIGNED_INT, 0);
            glPolygonMode( GL_FRONT_AND_BACK, GL_FILL );
        }
    }
    else
    {
        // No faces: render the mesh as a point cloud.
        glDrawArrays(GL_POINTS,
                     0,
                     vbo.nb_vertices);
    }

    glDisableClientState(GL_VERTEX_ARRAY);

    if (vbo.has_color)
        glDisableClientState(GL_COLOR_ARRAY);

    if (vbo.has_texcoords)
        glDisableClientState(GL_TEXTURE_COORD_ARRAY);

    // bind with 0, so, switch back to normal pointer operation
    glBindBufferARB(GL_ARRAY_BUFFER_ARB, 0);

    if (vbo.has_faces)
    {
        glBindBufferARB(GL_ELEMENT_ARRAY_BUFFER_ARB, 0);
    }

    glFinish();
    tc_gl_render.stop();

    // Read the rendered pixels back from the pbuffer.
    ntk::TimeCount tc_image("to_image", 2);
    QImage qimage = m_pbuffer->toImage();
    tc_image.stop();

    ntk::TimeCount tc_depth_buffer("compute_depth_buffer", 2);
    computeDepthBuffer();
    tc_depth_buffer.stop();

    // Compose the rendered pixels over `image`, using the GL alpha channel
    // as coverage, and cache them in m_color_buffer.
    // NOTE(review): assumes `image` is at least as large as the pbuffer —
    // confirm callers allocate it accordingly.
    ntk::TimeCount tc_convert("convert_to_cv", 2);
    for (int r = 0; r < qimage.height(); ++r)
        for (int c = 0; c < qimage.width(); ++c)
        {
            QRgb pixel = qimage.pixel(c,r);
            Vec4b color (qBlue(pixel), qGreen(pixel), qRed(pixel), qAlpha(pixel));
            m_color_buffer(r,c) = color;
            float a = qAlpha(pixel)/255.f;
            if (a > 0)
            {
                Vec4b old_color = image(r,c);
                image(r,c) = Vec4b(old_color[0]*(1-a) + color[0]*a,
                                   old_color[1]*(1-a) + color[1]*a,
                                   old_color[2]*(1-a) + color[2]*a,
                                   255);
            }
        }
    tc_convert.stop();
}
Exemplo n.º 15
0
// Acquisition thread body of the Nite-based grabber. Allocates all image
// buffers once, then loops: update the OpenNI context, pull depth / RGB
// (and, when skeleton tracking is enabled, user labels and joints) into
// m_current_image, and swap it into m_rgbd_image under the write lock.
void NiteRGBDGrabber :: run()
{
  m_should_exit = false;
  m_current_image.setCalibration(m_calib_data);
  m_rgbd_image.setCalibration(m_calib_data);

  // Raw buffers, zero-initialized.
  m_rgbd_image.rawRgbRef() = Mat3b(m_calib_data->rawRgbSize());
  m_rgbd_image.rawDepthRef() = Mat1f(m_calib_data->raw_depth_size);
  m_rgbd_image.rawIntensityRef() = Mat1f(m_calib_data->rawRgbSize());

  m_rgbd_image.rawIntensityRef() = 0.f;
  m_rgbd_image.rawDepthRef() = 0.f;
  m_rgbd_image.rawRgbRef() = Vec3b(0,0,0);

  // Processed views initially alias the raw buffers.
  m_rgbd_image.rgbRef() = m_rgbd_image.rawRgbRef();
  m_rgbd_image.depthRef() = m_rgbd_image.rawDepthRef();
  m_rgbd_image.intensityRef() = m_rgbd_image.rawIntensityRef();

  m_rgbd_image.userLabelsRef() = cv::Mat1b(m_calib_data->raw_depth_size);
  m_rgbd_image.userLabelsRef() = 0u;

  m_rgbd_image.setSkeletonData(new Skeleton());

  m_current_image.rawRgbRef() = Mat3b(m_calib_data->rawRgbSize());
  m_current_image.rawRgbRef() = Vec3b(0,0,0);
  m_current_image.rawDepthRef() = Mat1f(m_calib_data->raw_depth_size);
  m_current_image.rawDepthRef() = 0.f;
  m_current_image.rawIntensityRef() = Mat1f(m_calib_data->rawRgbSize());
  m_current_image.rawIntensityRef() = 0.f;

  m_current_image.rgbRef() = m_current_image.rawRgbRef();
  m_current_image.depthRef() = m_current_image.rawDepthRef();
  m_current_image.intensityRef() = m_current_image.rawIntensityRef();

  m_current_image.userLabelsRef() = cv::Mat1b(m_calib_data->raw_depth_size);
  m_current_image.userLabelsRef() = 0u;

  m_current_image.setSkeletonData(new Skeleton());

  // When RGB and depth resolutions match, mapped views alias the raw
  // buffers; otherwise allocate separate mapped buffers (mapped RGB in
  // depth geometry, mapped depth in RGB geometry).
  bool mapping_required = m_calib_data->rawRgbSize() != m_calib_data->raw_depth_size;
  if (!mapping_required)
  {
    m_rgbd_image.mappedRgbRef() = m_rgbd_image.rawRgbRef();
    m_rgbd_image.mappedDepthRef() = m_rgbd_image.rawDepthRef();
    m_current_image.mappedRgbRef() = m_current_image.rawRgbRef();
    m_current_image.mappedDepthRef() = m_current_image.rawDepthRef();
  }
  else
  {
    m_rgbd_image.mappedRgbRef() = Mat3b(m_calib_data->raw_depth_size);
    m_rgbd_image.mappedRgbRef() = Vec3b(0,0,0);
    m_rgbd_image.mappedDepthRef() = Mat1f(m_calib_data->rawRgbSize());
    m_rgbd_image.mappedDepthRef() = 0.f;
    m_current_image.mappedRgbRef() = Mat3b(m_calib_data->raw_depth_size);
    m_current_image.mappedRgbRef() = Vec3b(0,0,0);
    m_current_image.mappedDepthRef() = Mat1f(m_calib_data->rawRgbSize());
    m_current_image.mappedDepthRef() = 0.f;
  }

  xn::SceneMetaData sceneMD;
  xn::DepthMetaData depthMD;
  xn::ImageMetaData rgbMD;

  ImageBayerGRBG bayer_decoder(ImageBayerGRBG::EdgeAware);

  while (!m_should_exit)
  {
    waitForNewEvent();
    m_ni_context.WaitAndUpdateAll();
    if (m_body_event_detector)
      m_body_event_detector->update();
    m_ni_depth_generator.GetMetaData(depthMD);
    m_ni_rgb_generator.GetMetaData(rgbMD);
      // NOTE(review): enable_skeleton_tracking is not declared in this
      // function — presumably a global/config flag; confirm.
      if (enable_skeleton_tracking) 
          m_ni_user_generator.GetUserPixels(0, sceneMD);

    const XnDepthPixel* pDepth = depthMD.Data();
    ntk_assert((depthMD.XRes() == m_current_image.rawDepth().cols)
               && (depthMD.YRes() == m_current_image.rawDepth().rows),
               "Invalid image size.");

    // Convert to meters.
    float* raw_depth_ptr = m_current_image.rawDepthRef().ptr<float>();
    for (int i = 0; i < depthMD.XRes()*depthMD.YRes(); ++i)
      raw_depth_ptr[i] = pDepth[i]/1000.f;

    if (m_custom_bayer_decoding)
    {
      // Raw Bayer pattern decoded in software, then RGB->BGR.
      uchar* raw_rgb_ptr = m_current_image.rawRgbRef().ptr<uchar>();
      bayer_decoder.fillRGB(rgbMD,
                            m_current_image.rawRgb().cols, m_current_image.rawRgb().rows,
                            raw_rgb_ptr);
      cvtColor(m_current_image.rawRgbRef(), m_current_image.rawRgbRef(), CV_RGB2BGR);
    }
    else
    {
      // Driver-decoded RGB24; swap channel order to BGR inline.
      const XnUInt8* pImage = rgbMD.Data();
      ntk_assert(rgbMD.PixelFormat() == XN_PIXEL_FORMAT_RGB24, "Invalid RGB format.");
      uchar* raw_rgb_ptr = m_current_image.rawRgbRef().ptr<uchar>();
      for (int i = 0; i < rgbMD.XRes()*rgbMD.YRes()*3; i += 3)
        for (int k = 0; k < 3; ++k)
        {
          raw_rgb_ptr[i+k] = pImage[i+(2-k)];
        }
    }

      // Per-pixel user labels, then skeleton joints of the first tracked user.
      if (enable_skeleton_tracking)  {
          uchar* user_mask_ptr = m_current_image.userLabelsRef().ptr<uchar>();
          const XnLabel* pLabel = sceneMD.Data();
          for (int i = 0; i < sceneMD.XRes()*sceneMD.YRes(); ++i)
          {
              user_mask_ptr[i] = pLabel[i];
          }
          
          XnUserID user_ids[15];
          XnUInt16 num_users = 15;
          m_ni_user_generator.GetUsers(user_ids, num_users);
          
          // FIXME: only one user supported.
          for (int i = 0; i < num_users; ++i)
          {
              XnUserID user_id = user_ids[i];
              if (m_ni_user_generator.GetSkeletonCap().IsTracking(user_id))
              {
                  m_current_image.skeletonRef()->computeJoints(user_id, m_ni_user_generator, m_ni_depth_generator);
                  break;
              }
          }   
      }

    {
      // Publish the frame: swap the working image with the shared one.
      QWriteLocker locker(&m_lock);
      m_current_image.swap(m_rgbd_image);
    }

    advertiseNewFrame();
  }
}
Exemplo n.º 16
0
// Unimplemented stub: loading an RGBDImage from a directory is not
// supported in this build; any call fails through the assertion below.
void RGBDImage :: loadFromFile(const std::string& dir,
                               const RGBDCalibration* calib)
{
    ntk_assert(0, "not implemented.");
}