Code Example #1
  void SiftObjectDetector :: setObjectDatabase(const ObjectDatabasePtr& db)
  {
    super::setObjectDatabase(db);

    delete m_sift_indexer;  // deleting a null pointer is a no-op
    m_sift_indexer = 0;     // avoid a dangling pointer if we return early below
    if (!db) return;

    if (m_params.sift_database_type == "simple")
      m_sift_indexer = new SimpleFeatureIndexer(objectDatabase(), m_params.feature_type);
#ifdef NESTK_DISABLED_ADS_NOT_YET_IMPORTED
    else if (m_params.sift_database_type == "lsh")
      m_sift_indexer = new FeatureIndexerLSH(objectDatabase(), m_params.feature_type);
#endif
    else if (m_params.sift_database_type == "kdt")
      m_sift_indexer = new FeatureIndexerKdt(objectDatabase(), m_params.feature_type);
#ifdef NESTK_DISABLED_ADS_NOT_YET_IMPORTED
    else if (m_params.sift_database_type == "with_clusters")
      m_sift_indexer = new FeatureIndexerWithClusters(objectDatabase(), m_params.feature_type);
    else if (m_params.sift_database_type == "with_clusters_no_mean")
      m_sift_indexer = new FeatureIndexerWithClustersNoMean(objectDatabase(), m_params.feature_type);
#endif
    else
      ntk_throw_exception("Invalid sift database type.");
    TimeCount pc ("loading sift indexer");
    m_sift_indexer->loadOrBuild();
    pc.stop();
  }
Code Example #2
OpenniGrabber :: OpenniGrabber(OpenniDriver& driver, const std::string& camera_serial) :
    m_driver(driver),
    m_camera_id(-1),
    m_camera_serial(camera_serial),
    m_subsampling_factor(1),
    m_need_pose_to_calibrate(false),
    m_max_num_users(15),
    m_body_event_detector(0),
    m_high_resolution(false),
    m_mirrored(false),
    m_custom_bayer_decoding(false),
    m_xml_config_file(DEFAULT_XML_CONFIG_FILE),
    m_track_users(true),
    m_get_infrared(false),
    m_has_rgb(true)
{
    for (size_t i = 0; i < driver.numDevices(); ++i)
    {
        if (driver.deviceInfo(i).serial == camera_serial)
        {
            m_camera_id = i;
            break;
        }
    }

    if (m_camera_id < 0)
    {
        ntk_throw_exception("Could not find any device with serial " + camera_serial);
    }

    setDefaultBayerMode();
}
Code Example #3
void OpenniGrabber :: setSubsamplingFactor(int factor)
{
    if (640 % factor > 0 || 480 % factor > 0)
    {
        ntk_throw_exception("Invalid subsampling factor. Must give a round subsampled size.");
    }
    m_subsampling_factor = factor;
}
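Examples #2, #3, and #9 together show the usual way an OpenniGrabber is set up. The sketch below is a minimal illustration under stated assumptions: it uses only the calls visible in these examples (OpenniDriver::numDevices, OpenniDriver::deviceInfo, the OpenniGrabber constructor, setSubsamplingFactor, connectToDevice) and assumes the corresponding nestk headers are already included; it is not code from the project.

// Usage sketch, not project code. Assumes the nestk OpenNI headers are
// included; only calls shown in Examples #2, #3 and #9 are used.
void openFirstDevice(OpenniDriver& driver)
{
    if (driver.numDevices() == 0)
        ntk_throw_exception("No OpenNI device available.");

    // Pick the first enumerated device by serial; the OpenniGrabber
    // constructor (Example #2) searches the driver for this serial again.
    const std::string serial = driver.deviceInfo(0).serial;
    OpenniGrabber grabber(driver, serial);

    // 640 and 480 are both divisible by 2, so this passes the check in Example #3.
    grabber.setSubsamplingFactor(2);

    // connectToDevice() (Example #9) reports failures via ntk_throw_exception_if.
    grabber.connectToDevice();
}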
Code Example #4
File: nite_rgbd_grabber.cpp, Project: jonaswitt/nestk
void NiteRGBDGrabber :: check_error(const XnStatus& status, const char* what) const
{
  if (status != XN_STATUS_OK)
  {
    ntk_dbg(0) << "[ERROR] " << cv::format("%s failed: %s\n", what, xnGetStatusString(status));
    ntk_throw_exception("Error in NiteRGBDGrabber.");
  }
}
Code Example #5
File: xml_serializable.cpp, Project: Makeroni/nestk
 void XmlSerializable ::
 loadFromXml(const QString& s, const char* element_tag, XmlContext* context)
 {
   XMLResults results;
   XMLNode e = XMLNode::parseString(s.toUtf8(), element_tag, &results);
   if (e.isEmpty())
     ntk_throw_exception("Could not parse xml file: " + e.getError(results.error));
   loadFromXml(e, element_tag, context);
 }
Code Example #6
File: xml_serializable.cpp, Project: Makeroni/nestk
 void XmlSerializable ::
 loadFromXml(const QFileInfo& fileinfo, const char* element_tag, XmlContext* context)
 {
   ntk_throw_exception_if(!fileinfo.isFile(), "Xml file does not exist.");
   XMLResults results;
   XMLNode e = XMLNode::parseFile(fileinfo.absoluteFilePath().toUtf8(), element_tag, &results);
   if (e.isEmpty())
     ntk_throw_exception("Could not parse xml file: " + e.getError(results.error));
   loadFromXml(e, element_tag, context);
 }
Code Example #7
File: xml_serializable.cpp, Project: Makeroni/nestk
 void XmlSerializable ::
 saveAsXml(const QFileInfo& fileinfo, const char* element_tag) const
 {
   QString s = getAsXml(element_tag);
   if (s.isNull()) ntk_throw_exception("Empty xml document.");
   QFile file (fileinfo.absoluteFilePath());
   qDebug() << fileinfo.absoluteFilePath();
   file.open(QFile::WriteOnly);
   ntk_throw_exception_if(!file.isWritable(),
                          "Cannot write into file "
                          + fileinfo.absoluteFilePath().toStdString());
   file.write(s.toUtf8());
   file.close();
 }
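Examples #5 through #7 cover both directions of the XML serialization API. The sketch below shows a save/reload round trip; MyObject is a hypothetical XmlSerializable subclass, the file name and the "object" element tag are arbitrary placeholders, and the null XmlContext is passed explicitly because none of these examples shows a default argument for it.

// Round-trip sketch. MyObject is a hypothetical XmlSerializable subclass;
// the file name and element tag are placeholders.
void saveAndReload(MyObject& obj)
{
    QFileInfo path("object.xml");

    // Throws on an empty document or an unwritable file (Example #7).
    obj.saveAsXml(path, "object");

    // Throws if the file is missing or cannot be parsed (Example #6).
    MyObject reloaded;
    reloaded.loadFromXml(path, "object", 0);
}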
Code Example #8
File: object_match.cpp, Project: Makeroni/nestk
  void ObjectMatch :: loadFromXmlElement(const XMLNode& element, XmlContext* context)
  {
    const ObjectDetector* finder = dynamic_cast<const ObjectDetector*>(context);    
    ntk_assert(finder != 0, "Bad context.");
    m_finder_data = &finder->data();
    std::string model_name;
    loadFromXmlAttribute(element, "model", model_name);
    const VisualObject* obj = finder->objectDatabase().visualObject(model_name);
    if (obj == 0) ntk_throw_exception("Unknown model: " + model_name);

    int view_id = 0;
    loadFromXmlAttribute(element, "view", view_id);
    
    ObjectPosePtr p (new ObjectPose(*m_finder_data, obj->views()[view_id], ntk::AffineTransform::identity));
    loadFromXmlChild(element, "pose", *p);
    m_pose = p;
  }
Code Example #9
bool OpenniGrabber :: connectToDevice()
{
    QMutexLocker ni_locker(&m_ni_mutex);
    ntk_dbg(1) << format("[Kinect %p] connecting", (void*) this);

    XnStatus status;
    xn::NodeInfoList device_node_info_list;
    status = m_driver.niContext().EnumerateProductionTrees(XN_NODE_TYPE_DEVICE, NULL, device_node_info_list);
    m_driver.checkXnError(status, "Cannot enumerate devices.");

    xn::NodeInfoList::Iterator nodeIt = device_node_info_list.Begin();
    for (; nodeIt != device_node_info_list.End (); ++nodeIt)
    {
        xn::NodeInfo deviceInfo = *nodeIt;
        if (m_driver.deviceInfo(m_camera_id).creation_info == deviceInfo.GetCreationInfo())
            break;
    }
    ntk_throw_exception_if(nodeIt == device_node_info_list.End (), "Cannot find device.");

    setCameraSerial(m_driver.deviceInfo(m_camera_id).serial);

    xn::NodeInfo deviceInfo = *nodeIt;
    ntk_assert(m_driver.deviceInfo(m_camera_id).creation_info == deviceInfo.GetCreationInfo(), "Inconsistent nodes!");
    status = m_driver.niContext().CreateProductionTree(deviceInfo, m_ni_device);
    m_driver.checkXnError(status, "Create Device Node");
    const XnProductionNodeDescription& description = deviceInfo.GetDescription();
    ntk_dbg(1) << format("device %d: vendor %s name %s, instance %s, serial %s",
                         m_camera_id,
                         description.strVendor, description.strName, deviceInfo.GetInstanceName(), m_driver.deviceInfo(m_camera_id).serial.c_str());
    xn::Query query;
    query.AddNeededNode(deviceInfo.GetInstanceName());

    bool is_kinect = (std::string("SensorKinect") == description.strName) || (std::string("SensorV2") == description.strName);

    status = m_ni_depth_generator.Create(m_driver.niContext(), &query);
    m_driver.checkXnError(status, "Create depth generator");
    XnMapOutputMode depth_mode;
    depth_mode.nXRes = 640;
    depth_mode.nYRes = 480;
    depth_mode.nFPS = 30;
    m_ni_depth_generator.SetMapOutputMode(depth_mode);

    status = m_driver.niContext().SetGlobalMirror(m_mirrored);
    m_driver.checkXnError(status, "Mirror");

    status = m_ni_rgb_generator.Create(m_driver.niContext(), &query);
    if (status != XN_STATUS_OK)
    {
        m_has_rgb = false;
        ntk_dbg(1) << "Warning: no color stream!";
    }

    if (m_has_rgb)
    {
        status = m_ni_rgb_generator.SetIntProperty ("Resolution", 1);
        m_driver.checkXnError(status, "Resolution");

        XnMapOutputMode rgb_mode;
        if (m_high_resolution)
        {
            rgb_mode.nFPS = 15;
            rgb_mode.nXRes = 1280;
            rgb_mode.nYRes = 1024;
        }
        else
        {
            rgb_mode.nXRes = 640;
            rgb_mode.nYRes = 480;
            rgb_mode.nFPS = 30;
        }
        status = m_ni_rgb_generator.SetMapOutputMode(rgb_mode);
        m_driver.checkXnError(status, "Set RGB mode");

        ntk_throw_exception_if(!m_ni_depth_generator.IsCapabilitySupported(XN_CAPABILITY_ALTERNATIVE_VIEW_POINT), "Cannot register images.");
        m_ni_depth_generator.GetAlternativeViewPointCap().SetViewPoint(m_ni_rgb_generator);

        if (!is_kinect && m_ni_depth_generator.IsCapabilitySupported(XN_CAPABILITY_FRAME_SYNC))
        {
            ntk_dbg(1) << "Frame Sync supported.";
            status = m_ni_depth_generator.GetFrameSyncCap ().FrameSyncWith (m_ni_rgb_generator);
            m_driver.checkXnError(status, "Set Frame Sync");
        }

        status = m_ni_ir_generator.Create(m_driver.niContext(), &query);
        m_driver.checkXnError(status, "Create infrared generator");
        XnMapOutputMode ir_mode;
        ir_mode.nFPS = 15;
        ir_mode.nXRes = 1280;
        ir_mode.nYRes = 1024;
        m_ni_ir_generator.SetMapOutputMode(ir_mode);
    }

    if (m_track_users)
    {
        status = m_ni_user_generator.Create(m_driver.niContext(), &query);
        m_driver.checkXnError(status, "Create user generator");
        status = m_ni_hands_generator.Create(m_driver.niContext(), &query);
        m_driver.checkXnError(status, "Create hands generator");
        status = m_ni_gesture_generator.Create(m_driver.niContext(), &query);
        m_driver.checkXnError(status, "Create gestures generator");
    }

    if (m_track_users)
    {
        ntk_throw_exception_if(!m_ni_user_generator.IsCapabilitySupported(XN_CAPABILITY_SKELETON),
                               "Supplied user generator doesn't support skeleton.");

        XnCallbackHandle hUserCallbacks, hCalibrationCallbacks, hPoseCallbacks;
        m_ni_user_generator.RegisterUserCallbacks(User_NewUser, User_LostUser, this, hUserCallbacks);
        m_ni_user_generator.GetSkeletonCap().RegisterCalibrationCallbacks(UserCalibration_CalibrationStart, UserCalibration_CalibrationEnd, this, hCalibrationCallbacks);

        if (m_ni_user_generator.GetSkeletonCap().NeedPoseForCalibration())
        {
            m_need_pose_to_calibrate = true;
            if (!m_ni_user_generator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION))
                ntk_throw_exception("Pose required, but not supported\n");
            m_ni_user_generator.GetPoseDetectionCap().RegisterToPoseCallbacks(UserPose_PoseDetected, NULL, this, hPoseCallbacks);
            m_ni_user_generator.GetSkeletonCap().GetCalibrationPose(m_calibration_pose);
        }

        m_ni_user_generator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);

        if (m_body_event_detector)
            m_body_event_detector->initialize(m_driver.niContext(), m_ni_depth_generator);
    }

    status = m_ni_depth_generator.StartGenerating();
    m_driver.checkXnError(status, "Depth::StartGenerating");

    if (m_has_rgb)
    {
        status = m_ni_rgb_generator.StartGenerating();
        m_driver.checkXnError(status, "RGB::StartGenerating");

        if (m_custom_bayer_decoding)
        {
            // Grayscale to get raw Bayer pattern.
            status = m_ni_rgb_generator.SetIntProperty ("InputFormat", 6);
            m_driver.checkXnError(status, "Change input format");

            status = m_ni_rgb_generator.SetPixelFormat(XN_PIXEL_FORMAT_GRAYSCALE_8_BIT);
            m_driver.checkXnError(status, "Change pixel format");
        }
    }

    if (m_track_users)
    {
        status = m_ni_user_generator.StartGenerating();
        m_driver.checkXnError(status, "User::StartGenerating");

        status = m_ni_hands_generator.StartGenerating();
        m_driver.checkXnError(status, "Hands::StartGenerating");

        status = m_ni_gesture_generator.StartGenerating();
        m_driver.checkXnError(status, "Gesture::StartGenerating");
    }

    waitAndUpdateActiveGenerators();
    if (!m_calib_data)
        estimateCalibration();

    m_connected = true;
    return true;
}
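Since connectToDevice() signals failures through ntk_throw_exception / ntk_throw_exception_if rather than only through its bool return value, callers usually guard it with a try/catch. The sketch below assumes the exception type thrown by ntk_throw_exception derives from std::exception, which these examples do not confirm.

// Error-handling sketch around Example #9; the std::exception assumption is
// not confirmed by the code above.
bool tryConnect(OpenniGrabber& grabber)
{
    try
    {
        return grabber.connectToDevice();
    }
    catch (const std::exception& e)
    {
        ntk_dbg(0) << "[ERROR] could not connect: " << e.what();
        return false;
    }
}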
Code Example #10
File: nite_rgbd_grabber.cpp, Project: jonaswitt/nestk
void NiteRGBDGrabber :: initialize()
{
    xn::EnumerationErrors errors;    
//    XnStatus status = m_ni_context.InitFromXmlFile(cfg.c_str(), &errors);
    
    XnStatus status = m_ni_context.Init();
    
    if (status != XN_STATUS_OK) {
        ntk_dbg(0) << "[ERROR] " << xnGetStatusString(status);
        ntk_throw_exception("Could not initialize NITE. Check Log."
                            "Most probable reasons are device not connected or no config/ directory"
                            "in the current directory.");
    }
    
    XnLicense license;
    strcpy(license.strKey, "0KOIk2JeIBYClPWVnMoRKn5cdY4=");
    strcpy(license.strVendor, "PrimeSense");
    status = m_ni_context.AddLicense(license);
    check_error(status, "add license");
        
    // find device & set name: 
    xn::Query devQuery;
    status = devQuery.SetCreationInfo(kinect_id.c_str());
    check_error(status, "set dev query creation info");

    xn::NodeInfoList device_node_info_list;     
    status = m_ni_context.EnumerateProductionTrees(XN_NODE_TYPE_DEVICE, &devQuery, device_node_info_list, NULL); 
    check_error(status, "device enum");
    
    xn::NodeInfoList::Iterator nodeIt = device_node_info_list.Begin();
    xn::NodeInfo info(*nodeIt); 
    status = info.SetInstanceName("Cam1");    
    check_error(status, "set device name");

    status = m_ni_context.CreateProductionTree(info);
    check_error(status, "create device production tree");
    

    // enumerate depth nodes: 
    xn::Query depthQuery;
    status = depthQuery.AddNeededNode("Cam1");
    check_error(status, "set depth query needed node");

    xn::NodeInfoList depth_node_info_list;     
    status = m_ni_context.EnumerateProductionTrees(XN_NODE_TYPE_DEPTH, &depthQuery, depth_node_info_list, NULL); 
    check_error(status, "depth enum");
    nodeIt = depth_node_info_list.Begin();
    info = xn::NodeInfo(*nodeIt);
    
    status = info.SetInstanceName("Depth1");    
    check_error(status, "set depth name");
        
    status = m_ni_context.CreateProductionTree(info);
    check_error(status, "create depth production tree");
    


    status = info.GetInstance(m_ni_depth_generator);
    check_error(status, "create depth generator instance");

    XnMapOutputMode mode;
    mode.nXRes = 640;
    mode.nYRes = 480;
    mode.nFPS = 30;
    
    status = m_ni_depth_generator.SetMapOutputMode(mode);
    check_error(status, "set depth map mode");
    
    // enumerate rgb nodes: 
    xn::NodeInfoList image_node_info_list; 
    status = m_ni_context.EnumerateProductionTrees(XN_NODE_TYPE_IMAGE, &depthQuery, image_node_info_list, NULL); 
    check_error(status, "image enum");
    nodeIt = image_node_info_list.Begin();
    info = xn::NodeInfo(*nodeIt);
    
    status = info.SetInstanceName("Image1");    
    check_error(status, "set image name");
    
    status = m_ni_context.CreateProductionTree(info);
    check_error(status, "create image production tree");
    
    status = info.GetInstance(m_ni_rgb_generator);
    check_error(status, "create image generator instance");
    
    XnMapOutputMode mode2;
    mode2.nXRes = 640;
    mode2.nYRes = 480;
    mode2.nFPS = 30;
    
    status = m_ni_rgb_generator.SetMapOutputMode(mode2);
    check_error(status, "set rgb map mode");

    if (enable_skeleton_tracking) {
        // enumerate user nodes: 
        xn::NodeInfoList user_node_info_list; 
        status = m_ni_context.EnumerateProductionTrees(XN_NODE_TYPE_USER, &depthQuery, user_node_info_list, NULL); 
        check_error(status, "user enum");
        nodeIt = user_node_info_list.Begin();
        info = xn::NodeInfo(*nodeIt);
        
        status = info.SetInstanceName("User1");    
        check_error(status, "set user name");
        
        status = m_ni_context.CreateProductionTree(info);
        check_error(status, "create user production tree");
        
        status = info.GetInstance(m_ni_user_generator);
        check_error(status, "create user generator instance");   
    }
    
    status = m_ni_context.StartGeneratingAll();
    check_error(status, "StartGenerating");



    if (m_high_resolution)
    {
        XnMapOutputMode rgb_mode;
        rgb_mode.nFPS = 15;
        rgb_mode.nXRes = 1280;
        rgb_mode.nYRes = 1024;
        m_ni_rgb_generator.SetMapOutputMode(rgb_mode);
    }

    if (m_custom_bayer_decoding)
    {
        // Grayscale to get raw Bayer pattern.
        status = m_ni_rgb_generator.SetIntProperty ("InputFormat", 6);
        check_error(status, "Change input format");

        status = m_ni_rgb_generator.SetPixelFormat(XN_PIXEL_FORMAT_GRAYSCALE_8_BIT);
        check_error(status, "Change pixel format");
    }

    ntk_ensure(m_ni_depth_generator.IsCapabilitySupported(XN_CAPABILITY_ALTERNATIVE_VIEW_POINT), "Cannot register images.");
    m_ni_depth_generator.GetAlternativeViewPointCap().SetViewPoint(m_ni_rgb_generator);

    if (enable_skeleton_tracking) {
        ntk_throw_exception_if(!m_ni_user_generator.IsCapabilitySupported(XN_CAPABILITY_SKELETON),
                               "Supplied user generator doesn't support skeleton.");
        
        XnCallbackHandle hUserCallbacks, hCalibrationCallbacks, hPoseCallbacks;
        m_ni_user_generator.RegisterUserCallbacks(User_NewUser, User_LostUser, this, hUserCallbacks);
        m_ni_user_generator.GetSkeletonCap().RegisterCalibrationCallbacks(UserCalibration_CalibrationStart, UserCalibration_CalibrationEnd, this, hCalibrationCallbacks);
        
        if (m_ni_user_generator.GetSkeletonCap().NeedPoseForCalibration())
        {
            m_need_pose_to_calibrate = true;
            if (!m_ni_user_generator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION))
                ntk_throw_exception("Pose required, but not supported\n");
            m_ni_user_generator.GetPoseDetectionCap().RegisterToPoseCallbacks(UserPose_PoseDetected, NULL, this, hPoseCallbacks);
            m_ni_user_generator.GetSkeletonCap().GetCalibrationPose(m_calibration_pose);
        }
        
        m_ni_user_generator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);
        
        if (m_body_event_detector)
            m_body_event_detector->initialize(m_ni_context, m_ni_depth_generator);        
    }

    status = m_ni_context.StartGeneratingAll();
    check_error(status, "StartGenerating");

    m_ni_context.WaitAndUpdateAll();
    estimateCalibration();
}
Code Example #11
void NiteRGBDGrabber :: initialize()
{
  const char* config_file = "config/NestkConfig.xml";

  xn::EnumerationErrors errors;
  XnStatus status = m_ni_context.InitFromXmlFile(config_file, &errors);
  if (status != XN_STATUS_OK)
  {
    ntk_dbg(0) << "[ERROR] " << xnGetStatusString(status);
    ntk_throw_exception("Could not initialize NITE. Check Log."
                        "Most probable reasons are device not connected or no config/ directory"
                        "in the current directory.");
  }

  status = m_ni_context.FindExistingNode(XN_NODE_TYPE_DEPTH, m_ni_depth_generator);
  check_error(status, "Find depth generator");

  status = m_ni_context.FindExistingNode(XN_NODE_TYPE_USER, m_ni_user_generator);
  check_error(status, "Find user generator");

  status = m_ni_context.FindExistingNode(XN_NODE_TYPE_IMAGE, m_ni_rgb_generator);
  check_error(status, "Find image generator");

  if (m_high_resolution)
  {
    XnMapOutputMode rgb_mode;
    rgb_mode.nFPS = 15;
    rgb_mode.nXRes = 1280;
    rgb_mode.nYRes = 1024;
    m_ni_rgb_generator.SetMapOutputMode(rgb_mode);
  }

  if (m_custom_bayer_decoding)
  {
    // Grayscale to get raw Bayer pattern.
    status = m_ni_rgb_generator.SetIntProperty ("InputFormat", 6);
    check_error(status, "Change input format");

    status = m_ni_rgb_generator.SetPixelFormat(XN_PIXEL_FORMAT_GRAYSCALE_8_BIT);
    check_error(status, "Change pixel format");
  }

  ntk_ensure(m_ni_depth_generator.IsCapabilitySupported(XN_CAPABILITY_ALTERNATIVE_VIEW_POINT), "Cannot register images.");
  m_ni_depth_generator.GetAlternativeViewPointCap().SetViewPoint(m_ni_rgb_generator);

  ntk_throw_exception_if(!m_ni_user_generator.IsCapabilitySupported(XN_CAPABILITY_SKELETON),
                         "Supplied user generator doesn't support skeleton.");

  XnCallbackHandle hUserCallbacks, hCalibrationCallbacks, hPoseCallbacks;
  m_ni_user_generator.RegisterUserCallbacks(User_NewUser, User_LostUser, this, hUserCallbacks);
  m_ni_user_generator.GetSkeletonCap().RegisterCalibrationCallbacks(UserCalibration_CalibrationStart, UserCalibration_CalibrationEnd, this, hCalibrationCallbacks);

  if (m_ni_user_generator.GetSkeletonCap().NeedPoseForCalibration())
  {
    m_need_pose_to_calibrate = true;
    if (!m_ni_user_generator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION))
      ntk_throw_exception("Pose required, but not supported\n");
    m_ni_user_generator.GetPoseDetectionCap().RegisterToPoseCallbacks(UserPose_PoseDetected, NULL, this, hPoseCallbacks);
    m_ni_user_generator.GetSkeletonCap().GetCalibrationPose(m_calibration_pose);
  }

  m_ni_user_generator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);

  if (m_body_event_detector)
    m_body_event_detector->initialize(m_ni_context, m_ni_depth_generator);

  status = m_ni_context.StartGeneratingAll();
  check_error(status, "StartGenerating");

  m_ni_context.WaitAndUpdateAll();
  estimateCalibration();
}