Example #1
0
/// Detect SIFT keypoints on the GPU and fill OpenCV-style outputs.
/// @param img         8-bit grayscale input image.
/// @param mask        Unused here (kept for interface compatibility).
/// @param keypoints   Output keypoints converted from SiftGPU format.
/// @param descriptors Output descriptors, 128 floats per keypoint.
void GPUSiftDetector::operator ()(const Mat1b &img,
                                  const Mat &mask,
                                  std::vector<KeyPoint> &keypoints,
                                  std::vector<float> &descriptors) const
{
  SiftGPU* sift = getSiftGPUInstance();
  ntk_ensure(sift, "Cannot use SiftGPU");

  std::vector<SiftGPU::SiftKeypoint> gpu_keypoints;

  // FIX: the result of RunSIFT was previously ignored; on failure the code
  // went on to read stale features from the SiftGPU instance.
  bool ok = sift->RunSIFT(img);
  if (!ok)
  {
    keypoints.clear();
    descriptors.clear();
    return;
  }

  const int n_features = sift->GetFeatureNum();
  gpu_keypoints.resize(n_features);
  descriptors.resize(128*n_features); // SIFT descriptors are 128-dimensional.

  // FIX: guard the &vec[0] accesses — indexing an empty vector is UB
  // when no features were detected.
  if (n_features > 0)
    sift->GetFeatureVector(&gpu_keypoints[0], &descriptors[0]);

  keypoints.resize(n_features);
  for (int i = 0; i < n_features; ++i)
  {
    cv::KeyPoint& location = keypoints[i];
    location.pt = Point2f(gpu_keypoints[i].x, gpu_keypoints[i].y);
    // SiftGPU stores orientation in `o` and scale in `s`.
    // NOTE(review): SiftGPU orientation is in radians while cv::KeyPoint::angle
    // is documented in degrees — confirm downstream consumers expect this.
    location.angle = gpu_keypoints[i].o;
    location.octave = gpu_keypoints[i].s;
    location.size = gpu_keypoints[i].s * 16;
  }
}
Example #2
0
  // Offscreen mesh renderer: allocates depth/color buffers and a GL pbuffer
  // context sized to the requested image, then configures fixed GL state.
  MeshRenderer :: MeshRenderer(int image_width, int image_height, float transparency)
    : m_mesh(0), m_transparency(transparency)
  {
    // CPU-side buffers the rendered depth (float) and RGBA color are read into.
    m_depth_buffer = cv::Mat1f(Size(image_width, image_height));
    m_color_buffer = cv::Mat4b(Size(image_width, image_height));
    // NOTE(review): m_context is allocated with raw `new` and only used to
    // supply the format below — confirm the destructor releases it and
    // m_pbuffer.
    m_context = new QGLContext(QGLFormat(QGL::SampleBuffers|QGL::DepthBuffer|QGL::AlphaChannel));
    m_pbuffer = new QGLPixelBuffer(QSize(image_width, image_height), m_context->format());
    // glewInit() requires a current GL context, so make the pbuffer current
    // before initializing GLEW.
    m_pbuffer->makeCurrent();

    GLenum err = glewInit();
    ntk_ensure(err == GLEW_OK, "Could not load GLEW.");
    std::cout << "Status: Using GLEW " << glewGetString(GLEW_VERSION) << std::endl;
    // VBO support is required by the rendering path.
    ntk_assert(GLEW_ARB_vertex_buffer_object, "Cannot use vertex buffer.");

    glEnable(GL_DEPTH);
    glEnable(GL_DEPTH_TEST);
    glEnable(GL_CULL_FACE);

    // Clear color is red with zero alpha; alpha 0 marks pixels not covered
    // by the rendered mesh.
    glClearColor(1.0f, 0.0f, 0.0f, 0.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
  }
// Fake-mode pose estimation: the pose is not computed, it is read back from
// the precomputed relative_pose.avs file stored next to the image on disk.
bool ntk::IncrementalPoseEstimatorFromFile::estimateCurrentPose()
{
    ntk_ensure(m_new_image.hasDirectory(), "Only works in fake mode!");
    const std::string pose_file = m_new_image.directory() + "/relative_pose.avs";
    m_current_pose.parseAvsFile(pose_file.c_str());
    return true;
}
Example #4
0
// Manually builds the OpenNI/NITE production graph: device, depth, image and
// (optionally) user nodes, each given an explicit instance name, then starts
// generation and derives calibration from the running streams.
void NiteRGBDGrabber :: initialize()
{
    xn::EnumerationErrors errors;    
//    XnStatus status = m_ni_context.InitFromXmlFile(cfg.c_str(), &errors);
    
    // Initialize the context programmatically instead of from an XML file.
    XnStatus status = m_ni_context.Init();
    
    if (status != XN_STATUS_OK) {
        ntk_dbg(0) << "[ERROR] " << xnGetStatusString(status);
        ntk_throw_exception("Could not initialize NITE. Check Log."
                            "Most probable reasons are device not connected or no config/ directory"
                            "in the current directory.");
    }
    
    // Register the NITE license key required by PrimeSense middleware nodes.
    XnLicense license;
    strcpy(license.strKey, "0KOIk2JeIBYClPWVnMoRKn5cdY4=");
    strcpy(license.strVendor, "PrimeSense");
    status = m_ni_context.AddLicense(license);
    check_error(status, "add license");
        
    // find device & set name: 
    xn::Query devQuery;
    devQuery.SetCreationInfo(kinect_id.c_str());
    // NOTE(review): `status` is not updated by SetCreationInfo above, so this
    // re-checks the AddLicense status — confirm intent.
    check_error(status, "set dev query creation info");

    xn::NodeInfoList device_node_info_list;     
    status = m_ni_context.EnumerateProductionTrees(XN_NODE_TYPE_DEVICE, &devQuery, device_node_info_list, NULL); 
    check_error(status, "device enum");
    
    // Take the first matching device and give it a stable instance name so
    // later queries can depend on it by name.
    xn::NodeInfoList::Iterator nodeIt = device_node_info_list.Begin();
    xn::NodeInfo info(*nodeIt); 
    status = info.SetInstanceName("Cam1");    
    check_error(status, "set device name");

    status = m_ni_context.CreateProductionTree(info);
    check_error(status, "create device production tree");
    

    // enumerate depth nodes: 
    // The query restricts results to nodes backed by the "Cam1" device.
    xn::Query depthQuery;
    status = depthQuery.AddNeededNode("Cam1");
    check_error(status, "set depth query needed node");

    xn::NodeInfoList depth_node_info_list;     
    status = m_ni_context.EnumerateProductionTrees(XN_NODE_TYPE_DEPTH, &depthQuery, depth_node_info_list, NULL); 
    check_error(status, "depth enum");
    nodeIt = depth_node_info_list.Begin();
    info = xn::NodeInfo(*nodeIt);
    
    status = info.SetInstanceName("Depth1");    
    check_error(status, "set depth name");
        
    status = m_ni_context.CreateProductionTree(info);
    check_error(status, "create depth production tree");
    


    status = info.GetInstance(m_ni_depth_generator);
    check_error(status, "create depth generator instance");

    // Standard VGA depth mode.
    XnMapOutputMode mode;
    mode.nXRes = 640;
    mode.nYRes = 480;
    mode.nFPS = 30;
    
    status = m_ni_depth_generator.SetMapOutputMode(mode);
    check_error(status, "set depth map mode");
    
    // enumerate rgb nodes: 
    // NOTE(review): reuses depthQuery here — presumably only to constrain the
    // image node to the "Cam1" device; confirm this is intentional.
    xn::NodeInfoList image_node_info_list; 
    status = m_ni_context.EnumerateProductionTrees(XN_NODE_TYPE_IMAGE, &depthQuery, image_node_info_list, NULL); 
    check_error(status, "image enum");
    nodeIt = image_node_info_list.Begin();
    info = xn::NodeInfo(*nodeIt);
    
    status = info.SetInstanceName("Image1");    
    check_error(status, "set image name");
    
    status = m_ni_context.CreateProductionTree(info);
    check_error(status, "create image production tree");
    
    status = info.GetInstance(m_ni_rgb_generator);
    check_error(status, "create image generator instance");
    
    // Standard VGA color mode.
    XnMapOutputMode mode2;
    mode2.nXRes = 640;
    mode2.nYRes = 480;
    mode2.nFPS = 30;
    
    status = m_ni_rgb_generator.SetMapOutputMode(mode2);
    check_error(status, "set rgb map mode");

    if (enable_skeleton_tracking) {
        // enumerate user nodes: 
        xn::NodeInfoList user_node_info_list; 
        status = m_ni_context.EnumerateProductionTrees(XN_NODE_TYPE_USER, &depthQuery, user_node_info_list, NULL); 
        check_error(status, "user enum");
        nodeIt = user_node_info_list.Begin();
        info = xn::NodeInfo(*nodeIt);
        
        status = info.SetInstanceName("User1");    
        check_error(status, "set user name");
        
        status = m_ni_context.CreateProductionTree(info);
        check_error(status, "create user production tree");
        
        status = info.GetInstance(m_ni_user_generator);
        check_error(status, "create user generator instance");   
    }
    
    // NOTE(review): StartGeneratingAll is called again near the end of this
    // function — confirm whether this first call is required before the
    // high-resolution / Bayer reconfiguration below.
    status = m_ni_context.StartGeneratingAll();
    check_error(status, "StartGenerating");



  if (m_high_resolution)
  {
    // Switch RGB to SXGA; the device only supports 15 FPS at this resolution.
    XnMapOutputMode rgb_mode;
    rgb_mode.nFPS = 15;
    rgb_mode.nXRes = 1280;
    rgb_mode.nYRes = 1024;
    m_ni_rgb_generator.SetMapOutputMode(rgb_mode);
  }

  if (m_custom_bayer_decoding)
  {
    // Grayscale to get raw Bayer pattern.
    status = m_ni_rgb_generator.SetIntProperty ("InputFormat", 6);
    check_error(status, "Change input format");

    status = m_ni_rgb_generator.SetPixelFormat(XN_PIXEL_FORMAT_GRAYSCALE_8_BIT);
    check_error(status, "Change pixel format");
  }

  // Align the depth map to the RGB camera viewpoint so pixels correspond.
  ntk_ensure(m_ni_depth_generator.IsCapabilitySupported(XN_CAPABILITY_ALTERNATIVE_VIEW_POINT), "Cannot register images.");
  m_ni_depth_generator.GetAlternativeViewPointCap().SetViewPoint(m_ni_rgb_generator);

    if (enable_skeleton_tracking) {
        ntk_throw_exception_if(!m_ni_user_generator.IsCapabilitySupported(XN_CAPABILITY_SKELETON),
                               "Supplied user generator doesn't support skeleton.");
        
        // Hook user enter/leave and calibration lifecycle callbacks.
        XnCallbackHandle hUserCallbacks, hCalibrationCallbacks, hPoseCallbacks;
        m_ni_user_generator.RegisterUserCallbacks(User_NewUser, User_LostUser, this, hUserCallbacks);
        m_ni_user_generator.GetSkeletonCap().RegisterCalibrationCallbacks(UserCalibration_CalibrationStart, UserCalibration_CalibrationEnd, this, hCalibrationCallbacks);
        
        // Older NITE versions require the user to strike a pose before
        // skeleton calibration can start.
        if (m_ni_user_generator.GetSkeletonCap().NeedPoseForCalibration())
        {
            m_need_pose_to_calibrate = true;
            if (!m_ni_user_generator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION))
                ntk_throw_exception("Pose required, but not supported\n");
            m_ni_user_generator.GetPoseDetectionCap().RegisterToPoseCallbacks(UserPose_PoseDetected, NULL, this, hPoseCallbacks);
            m_ni_user_generator.GetSkeletonCap().GetCalibrationPose(m_calibration_pose);
        }
        
        m_ni_user_generator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);
        
        if (m_body_event_detector)
            m_body_event_detector->initialize(m_ni_context, m_ni_depth_generator);        
    }

  status = m_ni_context.StartGeneratingAll();
  check_error(status, "StartGenerating");

  // Block until the first frames arrive, then derive camera calibration.
  m_ni_context.WaitAndUpdateAll();
  estimateCalibration();
}
Example #5
0
// Load from a viewXXXX directory.
// Reads the raw color/depth/amplitude/intensity channels from `dir`,
// preferring the binary .raw format, then .yml, then .png for each channel.
// @param dir       viewXXXX directory to load from.
// @param calib     Calibration applied to the image (may be stored as-is).
// @param processor Optional processor run on the loaded image; skipped if NULL.
void RGBDImage :: loadFromDir(const std::string& dir,
                              const RGBDCalibration* calib,
                              RGBDProcessor* processor)
{
    m_directory = dir;

    // Legacy layout: a single color image at the directory root and no raw/
    // subdirectory — use it as both raw and processed RGB.
    if (!is_file(dir+"/raw/color.png") && is_file(dir+"/color.png"))
    {
        rawRgbRef() = imread(dir + "/color.png", 1);
        rawRgb().copyTo(rgbRef());
        ntk_ensure(rgbRef().data, ("Could not read color image from " + dir).c_str());
    }
    else
    {
        if (is_file(dir + "/raw/color.png"))
        {
            rawRgbRef() = imread(dir + "/raw/color.png", 1);
            ntk_ensure(rawRgbRef().data, ("Could not read raw color image from " + dir).c_str());
        }

        if (is_file(dir + "/raw/depth.raw"))
        {
            rawDepthRef() = imread_Mat1f_raw(dir + "/raw/depth.raw");
            ntk_ensure(rawDepthRef().data, ("Could not read raw depth image from " + dir).c_str());
        }
        else if (is_file(dir + "/raw/depth.yml"))
        {
            rawDepthRef() = imread_yml(dir + "/raw/depth.yml");
            ntk_ensure(rawDepthRef().data, ("Could not read raw depth image from " + dir).c_str());
        }

        if (is_file(dir + "/raw/amplitude.raw"))
        {
            rawAmplitudeRef() = imread_Mat1f_raw(dir + "/raw/amplitude.raw");
            ntk_ensure(rawAmplitudeRef().data, ("Could not read raw amplitude image from " + dir).c_str());
        }
        // FIX: this was a plain `if`, which let amplitude.yml overwrite an
        // already-loaded amplitude.raw; now an else-if chain consistent with
        // the depth and intensity channels.
        else if (is_file(dir + "/raw/amplitude.yml"))
        {
            rawAmplitudeRef() = imread_yml(dir + "/raw/amplitude.yml");
            ntk_ensure(rawAmplitudeRef().data, ("Could not read raw amplitude image from " + dir).c_str());
        }
        else if (is_file(dir + "/raw/amplitude.png"))
        {
            rawAmplitudeRef() = imread(dir + "/raw/amplitude.png", 0);
            ntk_ensure(rawAmplitudeRef().data, ("Could not read raw amplitude image from " + dir).c_str());
        }

        if (is_file(dir + "/raw/intensity.raw"))
        {
            rawIntensityRef() = imread_Mat1f_raw(dir + "/raw/intensity.raw");
            ntk_ensure(rawIntensityRef().data, ("Could not read raw intensity image from " + dir).c_str());
        }
        else if (is_file(dir + "/raw/intensity.yml"))
        {
            rawIntensityRef() = imread_yml(dir + "/raw/intensity.yml");
            ntk_ensure(rawIntensityRef().data, ("Could not read raw intensity image from " + dir).c_str());
        }
        else if (is_file(dir + "/raw/intensity.png"))
        {
            rawIntensityRef() = imread(dir + "/raw/intensity.png", 0);
            ntk_ensure(rawIntensityRef().data, ("Could not read raw intensity image from " + dir).c_str());
        }
    }

    setCalibration(calib);
    if (processor)
        processor->processImage(*this);
}
Example #6
0
// Initializes the OpenNI/NITE context from an XML config file, locates the
// depth/user/image nodes it declares, configures RGB output and skeleton
// tracking callbacks, then starts generation and estimates calibration.
void NiteRGBDGrabber :: initialize()
{
  const char* config_file = "config/NestkConfig.xml";

  xn::EnumerationErrors errors;
  XnStatus status = m_ni_context.InitFromXmlFile(config_file, &errors);
  if (status != XN_STATUS_OK)
  {
    ntk_dbg(0) << "[ERROR] " << xnGetStatusString(status);
    ntk_throw_exception("Could not initialize NITE. Check Log."
                        "Most probable reasons are device not connected or no config/ directory"
                        "in the current directory.");
  }

  // The production nodes are created by the XML file; just look them up.
  status = m_ni_context.FindExistingNode(XN_NODE_TYPE_DEPTH, m_ni_depth_generator);
  check_error(status, "Find depth generator");

  status = m_ni_context.FindExistingNode(XN_NODE_TYPE_USER, m_ni_user_generator);
  check_error(status, "Find user generator");

  status = m_ni_context.FindExistingNode(XN_NODE_TYPE_IMAGE, m_ni_rgb_generator);
  check_error(status, "Find image generator");

  if (m_high_resolution)
  {
    // Switch RGB to SXGA; the device only supports 15 FPS at this resolution.
    XnMapOutputMode rgb_mode;
    rgb_mode.nFPS = 15;
    rgb_mode.nXRes = 1280;
    rgb_mode.nYRes = 1024;
    m_ni_rgb_generator.SetMapOutputMode(rgb_mode);
  }

  if (m_custom_bayer_decoding)
  {
    // Grayscale to get raw Bayer pattern.
    status = m_ni_rgb_generator.SetIntProperty ("InputFormat", 6);
    check_error(status, "Change input format");

    status = m_ni_rgb_generator.SetPixelFormat(XN_PIXEL_FORMAT_GRAYSCALE_8_BIT);
    check_error(status, "Change pixel format");
  }

  // Align the depth map to the RGB camera viewpoint so pixels correspond.
  ntk_ensure(m_ni_depth_generator.IsCapabilitySupported(XN_CAPABILITY_ALTERNATIVE_VIEW_POINT), "Cannot register images.");
  m_ni_depth_generator.GetAlternativeViewPointCap().SetViewPoint(m_ni_rgb_generator);

  ntk_throw_exception_if(!m_ni_user_generator.IsCapabilitySupported(XN_CAPABILITY_SKELETON),
                         "Supplied user generator doesn't support skeleton.");

  // Hook user enter/leave and calibration lifecycle callbacks.
  XnCallbackHandle hUserCallbacks, hCalibrationCallbacks, hPoseCallbacks;
  m_ni_user_generator.RegisterUserCallbacks(User_NewUser, User_LostUser, this, hUserCallbacks);
  m_ni_user_generator.GetSkeletonCap().RegisterCalibrationCallbacks(UserCalibration_CalibrationStart, UserCalibration_CalibrationEnd, this, hCalibrationCallbacks);

  // Older NITE versions require the user to strike a pose before skeleton
  // calibration can start.
  if (m_ni_user_generator.GetSkeletonCap().NeedPoseForCalibration())
  {
    m_need_pose_to_calibrate = true;
    if (!m_ni_user_generator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION))
      ntk_throw_exception("Pose required, but not supported\n");
    m_ni_user_generator.GetPoseDetectionCap().RegisterToPoseCallbacks(UserPose_PoseDetected, NULL, this, hPoseCallbacks);
    m_ni_user_generator.GetSkeletonCap().GetCalibrationPose(m_calibration_pose);
  }

  m_ni_user_generator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);

  if (m_body_event_detector)
    m_body_event_detector->initialize(m_ni_context, m_ni_depth_generator);

  status = m_ni_context.StartGeneratingAll();
  check_error(status, "StartGenerating");

  // Block until the first frames arrive, then derive camera calibration.
  m_ni_context.WaitAndUpdateAll();
  estimateCalibration();
}