Example #1
 void XmlSerializable ::
 loadFromXmlChild(const XMLNode& element, const char* tag,
                  XmlSerializable& object, XmlContext* context)
 {
   XMLNode child = element.getChildNode(tag);
  ntk_throw_exception_if(child.isEmpty(), "Cannot find tag `" + std::string(tag) + "'");
   object.loadFromXmlElement(child, context);
 }
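In practice loadFromXmlChild is called from a subclass's loadFromXmlElement override, once per nested element. A minimal sketch of that pattern; everything except the helper itself is hypothetical (PlayerConfig, CameraConfig, PoseConfig and the assumed two-argument virtual signature are illustrations, not part of the library):

// Sketch only: class names, member names and the loadFromXmlElement signature are assumptions.
class PlayerConfig : public XmlSerializable
{
public:
  virtual void loadFromXmlElement(const XMLNode& element, XmlContext* context)
  {
    // Hand each nested child element to the member object that owns it.
    loadFromXmlChild(element, "camera", m_camera, context);
    loadFromXmlChild(element, "pose", m_pose, context);
  }
  // fillXmlElement() and the rest of the save path are omitted from this sketch.
private:
  CameraConfig m_camera;
  PoseConfig m_pose;
};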
Example #2
 void XmlSerializable::
 loadFromXml(XMLNode& doc, const char* element_tag, XmlContext * context)
 {
   XMLNode e = doc;
  ntk_throw_exception_if(e.getNameAsString() != element_tag, "Main element is not `" + std::string(element_tag) + "'.");
   if (context)
     loadFromXmlElement(e, context);
   else
     loadFromXmlElement(e);
 }
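Because this overload takes an already-parsed XMLNode, it also works on documents built in memory. A possible usage sketch; MyObject is a hypothetical XmlSerializable subclass, and XMLNode::parseString is the in-memory counterpart of the parseFile call used in Example #3 below:

// Sketch: MyObject is hypothetical; the XML payload is a placeholder.
XMLResults results;
XMLNode doc = XMLNode::parseString("<scene>...</scene>", "scene", &results);
MyObject obj;
if (!doc.isEmpty())
    obj.loadFromXml(doc, "scene", 0); // pass 0 when no XmlContext is needed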
Example #3
 void XmlSerializable ::
 loadFromXml(const QFileInfo& fileinfo, const char* element_tag, XmlContext* context)
 {
   ntk_throw_exception_if(!fileinfo.isFile(), "Xml file does not exist.");
   XMLResults results;
   XMLNode e = XMLNode::parseFile(fileinfo.absoluteFilePath().toUtf8(), element_tag, &results);
   if (e.isEmpty())
    ntk_throw_exception("Could not parse xml file: " + std::string(e.getError(results.error)));
   loadFromXml(e, element_tag, context);
 }
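A missing or malformed file is reported through ntk_throw_exception, so callers usually wrap the file-based loader in a try/catch. A minimal sketch, assuming the nestk exception type derives from std::exception and that SceneDescription is a hypothetical subclass:

SceneDescription scene; // hypothetical XmlSerializable subclass
try
{
    scene.loadFromXml(QFileInfo("scene.xml"), "scene", 0);
}
catch (const std::exception& e)
{
    ntk_dbg(0) << "Could not load scene.xml: " << e.what();
}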
Example #4
 void XmlSerializable ::
 saveAsXml(const QFileInfo& fileinfo, const char* element_tag) const
 {
   QString s = getAsXml(element_tag);
   if (s.isNull()) ntk_throw_exception("Empty xml document.");
   QFile file (fileinfo.absoluteFilePath());
   qDebug() << fileinfo.absoluteFilePath();
   file.open(QFile::WriteOnly);
   ntk_throw_exception_if(!file.isWritable(),
                          "Cannot write into file "
                          + fileinfo.absoluteFilePath().toStdString());
   file.write(s.toUtf8());
   file.close();
 }
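The save path mirrors the loader: getAsXml serializes the object, saveAsXml writes it out and throws on an empty document or an unwritable target. A round-trip sketch under the same assumptions as above:

SceneDescription scene;                                 // hypothetical subclass
scene.saveAsXml(QFileInfo("/tmp/scene.xml"), "scene");  // throws on failure
SceneDescription reloaded;
reloaded.loadFromXml(QFileInfo("/tmp/scene.xml"), "scene", 0);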
Example #5
void ProjectorCalibration :: loadFromFile(const char* filename)
{
    QFileInfo f (filename);
    ntk_throw_exception_if(!f.exists(), "Could not find calibration file.");
    cv::FileStorage calibration_file (filename, CV_STORAGE_READ);
    readMatrix(calibration_file, "proj_intrinsics", intrinsics);
    readMatrix(calibration_file, "proj_distortion", distortion);
    readMatrix(calibration_file, "R", R);
    readMatrix(calibration_file, "T", T);
    cv::Mat1i size_mat;
    readMatrix(calibration_file, "proj_size", size_mat);
    proj_size = cv::Size(size_mat(0,0), size_mat(0,1));
    calibration_file.release();

    pose = new Pose3D();
    pose->toRightCamera(intrinsics, R, T);

    initUndistortRectifyMap(intrinsics, distortion,
                            Mat(), intrinsics, proj_size, CV_16SC2,
                            undistort_map1, undistort_map2);
}
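The calibration loaders follow the same convention: a missing file raises an exception, and a successful call leaves the matrices, the pose and the undistortion maps filled in. A usage sketch, assuming the members touched above (intrinsics, pose, proj_size, ...) are publicly accessible:

ProjectorCalibration proj_calib;
try
{
    proj_calib.loadFromFile("projector_calibration.yml");
    // proj_calib.intrinsics, proj_calib.pose and the undistortion maps are now usable.
}
catch (const std::exception& e)
{
    ntk_dbg(0) << "Projector calibration not loaded: " << e.what();
}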
Example #6
bool OpenniGrabber :: connectToDevice()
{
    QMutexLocker ni_locker(&m_ni_mutex);
    ntk_dbg(1) << format("[Kinect %x] connecting", this);

    XnStatus status;
    xn::NodeInfoList device_node_info_list;
    status = m_driver.niContext().EnumerateProductionTrees(XN_NODE_TYPE_DEVICE, NULL, device_node_info_list);
    m_driver.checkXnError(status, "Cannot enumerate devices.");

    xn::NodeInfoList::Iterator nodeIt = device_node_info_list.Begin();
    for (; nodeIt != device_node_info_list.End (); ++nodeIt)
    {
        xn::NodeInfo deviceInfo = *nodeIt;
        if (m_driver.deviceInfo(m_camera_id).creation_info == deviceInfo.GetCreationInfo())
            break;
    }
    ntk_throw_exception_if(nodeIt == device_node_info_list.End (), "Cannot find device.");

    setCameraSerial(m_driver.deviceInfo(m_camera_id).serial);

    xn::NodeInfo deviceInfo = *nodeIt;
    ntk_assert(m_driver.deviceInfo(m_camera_id).creation_info == deviceInfo.GetCreationInfo(), "Inconsistent nodes!");
    status = m_driver.niContext().CreateProductionTree(deviceInfo, m_ni_device);
    m_driver.checkXnError(status, "Create Device Node");
    const XnProductionNodeDescription& description = deviceInfo.GetDescription();
    ntk_dbg(1) << format("device %d: vendor %s name %s, instance %s, serial %s",
                         m_camera_id,
                         description.strVendor, description.strName, deviceInfo.GetInstanceName(), m_driver.deviceInfo(m_camera_id).serial.c_str());
    xn::Query query;
    query.AddNeededNode(deviceInfo.GetInstanceName());

    bool is_kinect = (std::string("SensorKinect") == description.strName) || (std::string("SensorV2") == description.strName);

    status = m_ni_depth_generator.Create(m_driver.niContext(), &query);
    m_driver.checkXnError(status, "Create depth generator");
    XnMapOutputMode depth_mode;
    depth_mode.nXRes = 640;
    depth_mode.nYRes = 480;
    depth_mode.nFPS = 30;
    m_ni_depth_generator.SetMapOutputMode(depth_mode);

    status = m_driver.niContext().SetGlobalMirror(m_mirrored);
    m_driver.checkXnError(status, "Mirror");

    status = m_ni_rgb_generator.Create(m_driver.niContext(), &query);
    if (status != XN_STATUS_OK)
    {
        m_has_rgb = false;
        ntk_dbg(1) << "Warning: no color stream!";
    }

    if (m_has_rgb)
    {
        status = m_ni_rgb_generator.SetIntProperty ("Resolution", 1);
        m_driver.checkXnError(status, "Resolution");

        XnMapOutputMode rgb_mode;
        if (m_high_resolution)
        {
            rgb_mode.nFPS = 15;
            rgb_mode.nXRes = 1280;
            rgb_mode.nYRes = 1024;
        }
        else
        {
            rgb_mode.nXRes = 640;
            rgb_mode.nYRes = 480;
            rgb_mode.nFPS = 30;
        }
        status = m_ni_rgb_generator.SetMapOutputMode(rgb_mode);
        m_driver.checkXnError(status, "Set RGB mode");

        ntk_throw_exception_if(!m_ni_depth_generator.IsCapabilitySupported(XN_CAPABILITY_ALTERNATIVE_VIEW_POINT), "Cannot register images.");
        m_ni_depth_generator.GetAlternativeViewPointCap().SetViewPoint(m_ni_rgb_generator);

        if (!is_kinect && m_ni_depth_generator.IsCapabilitySupported(XN_CAPABILITY_FRAME_SYNC))
        {
            ntk_dbg(1) << "Frame Sync supported.";
            status = m_ni_depth_generator.GetFrameSyncCap ().FrameSyncWith (m_ni_rgb_generator);
            m_driver.checkXnError(status, "Set Frame Sync");
        }

        status = m_ni_ir_generator.Create(m_driver.niContext(), &query);
        m_driver.checkXnError(status, "Create infrared generator");
        XnMapOutputMode ir_mode;
        ir_mode.nFPS = 15;
        ir_mode.nXRes = 1280;
        ir_mode.nYRes = 1024;
        m_ni_ir_generator.SetMapOutputMode(ir_mode);
    }

    if (m_track_users)
    {
        status = m_ni_user_generator.Create(m_driver.niContext(), &query);
        m_driver.checkXnError(status, "Create user generator");
        status = m_ni_hands_generator.Create(m_driver.niContext(), &query);
        m_driver.checkXnError(status, "Create hands generator");
        status = m_ni_gesture_generator.Create(m_driver.niContext(), &query);
        m_driver.checkXnError(status, "Create gestures generator");
    }

    if (m_track_users)
    {
        ntk_throw_exception_if(!m_ni_user_generator.IsCapabilitySupported(XN_CAPABILITY_SKELETON),
                               "Supplied user generator doesn't support skeleton.");

        XnCallbackHandle hUserCallbacks, hCalibrationCallbacks, hPoseCallbacks;
        m_ni_user_generator.RegisterUserCallbacks(User_NewUser, User_LostUser, this, hUserCallbacks);
        m_ni_user_generator.GetSkeletonCap().RegisterCalibrationCallbacks(UserCalibration_CalibrationStart, UserCalibration_CalibrationEnd, this, hCalibrationCallbacks);

        if (m_ni_user_generator.GetSkeletonCap().NeedPoseForCalibration())
        {
            m_need_pose_to_calibrate = true;
            if (!m_ni_user_generator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION))
                ntk_throw_exception("Pose required, but not supported\n");
            m_ni_user_generator.GetPoseDetectionCap().RegisterToPoseCallbacks(UserPose_PoseDetected, NULL, this, hPoseCallbacks);
            m_ni_user_generator.GetSkeletonCap().GetCalibrationPose(m_calibration_pose);
        }

        m_ni_user_generator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);

        if (m_body_event_detector)
            m_body_event_detector->initialize(m_driver.niContext(), m_ni_depth_generator);
    }

    status = m_ni_depth_generator.StartGenerating();
    m_driver.checkXnError(status, "Depth::StartGenerating");

    if (m_has_rgb)
    {
        status = m_ni_rgb_generator.StartGenerating();
        m_driver.checkXnError(status, "RGB::StartGenerating");

        if (m_custom_bayer_decoding)
        {
            // Grayscale to get raw Bayer pattern.
            status = m_ni_rgb_generator.SetIntProperty ("InputFormat", 6);
            m_driver.checkXnError(status, "Change input format");

            status = m_ni_rgb_generator.SetPixelFormat(XN_PIXEL_FORMAT_GRAYSCALE_8_BIT);
            m_driver.checkXnError(status, "Change pixel format");
        }
    }

    if (m_track_users)
    {
        status = m_ni_user_generator.StartGenerating();
        m_driver.checkXnError(status, "User::StartGenerating");

        status = m_ni_hands_generator.StartGenerating();
        m_driver.checkXnError(status, "Hands::StartGenerating");

        status = m_ni_gesture_generator.StartGenerating();
        m_driver.checkXnError(status, "Gesture::StartGenerating");
    }

    waitAndUpdateActiveGenerators();
    if (!m_calib_data)
        estimateCalibration();

    m_connected = true;
    return true;
}
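connectToDevice is normally driven by the surrounding driver/grabber pair rather than called in isolation. A setup sketch of how RGBDemo-style code might wire it up; the OpenniDriver constructor and the QThread-style start() are assumptions to check against the actual headers:

OpenniDriver ni_driver;              // enumerates the available OpenNI devices
OpenniGrabber grabber(ni_driver, 0); // camera index 0
if (grabber.connectToDevice())
    grabber.start();                 // launches the acquisition thread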
Example #7
void NiteRGBDGrabber :: initialize()
{
    xn::EnumerationErrors errors;    
//    XnStatus status = m_ni_context.InitFromXmlFile(cfg.c_str(), &errors);
    
    XnStatus status = m_ni_context.Init();
    
    if (status != XN_STATUS_OK) {
        ntk_dbg(0) << "[ERROR] " << xnGetStatusString(status);
        ntk_throw_exception("Could not initialize NITE. Check Log. "
                            "Most probable reasons are device not connected or no config/ "
                            "directory in the current directory.");
    }
    
    XnLicense license;
    strcpy(license.strKey, "0KOIk2JeIBYClPWVnMoRKn5cdY4=");
    strcpy(license.strVendor, "PrimeSense");
    status = m_ni_context.AddLicense(license);
    check_error(status, "add license");
        
    // find device & set name: 
    xn::Query devQuery;
    status = devQuery.SetCreationInfo(kinect_id.c_str());
    check_error(status, "set dev query creation info");

    xn::NodeInfoList device_node_info_list;     
    status = m_ni_context.EnumerateProductionTrees(XN_NODE_TYPE_DEVICE, &devQuery, device_node_info_list, NULL); 
    check_error(status, "device enum");
    
    xn::NodeInfoList::Iterator nodeIt = device_node_info_list.Begin();
    xn::NodeInfo info(*nodeIt); 
    status = info.SetInstanceName("Cam1");    
    check_error(status, "set device name");

    status = m_ni_context.CreateProductionTree(info);
    check_error(status, "create device production tree");
    

    // enumerate depth nodes: 
    xn::Query depthQuery;
    status = depthQuery.AddNeededNode("Cam1");
    check_error(status, "set depth query needed node");

    xn::NodeInfoList depth_node_info_list;     
    status = m_ni_context.EnumerateProductionTrees(XN_NODE_TYPE_DEPTH, &depthQuery, depth_node_info_list, NULL); 
    check_error(status, "depth enum");
    nodeIt = depth_node_info_list.Begin();
    info = xn::NodeInfo(*nodeIt);
    
    status = info.SetInstanceName("Depth1");    
    check_error(status, "set depth name");
        
    status = m_ni_context.CreateProductionTree(info);
    check_error(status, "create depth production tree");
    


    status = info.GetInstance(m_ni_depth_generator);
    check_error(status, "create depth generator instance");

    XnMapOutputMode mode;
    mode.nXRes = 640;
    mode.nYRes = 480;
    mode.nFPS = 30;
    
    status = m_ni_depth_generator.SetMapOutputMode(mode);
    check_error(status, "set depth map mode");
    
    // enumerate rgb nodes: 
    xn::NodeInfoList image_node_info_list; 
    status = m_ni_context.EnumerateProductionTrees(XN_NODE_TYPE_IMAGE, &depthQuery, image_node_info_list, NULL); 
    check_error(status, "image enum");
    nodeIt = image_node_info_list.Begin();
    info = xn::NodeInfo(*nodeIt);
    
    status = info.SetInstanceName("Image1");    
    check_error(status, "set image name");
    
    status = m_ni_context.CreateProductionTree(info);
    check_error(status, "create image production tree");
    
    status = info.GetInstance(m_ni_rgb_generator);
    check_error(status, "create image generator instance");
    
    XnMapOutputMode mode2;
    mode2.nXRes = 640;
    mode2.nYRes = 480;
    mode2.nFPS = 30;
    
    status = m_ni_rgb_generator.SetMapOutputMode(mode2);
    check_error(status, "set rgb map mode");

    if (enable_skeleton_tracking) {
        // enumerate user nodes: 
        xn::NodeInfoList user_node_info_list; 
        status = m_ni_context.EnumerateProductionTrees(XN_NODE_TYPE_USER, &depthQuery, user_node_info_list, NULL); 
        check_error(status, "user enum");
        nodeIt = user_node_info_list.Begin();
        info = xn::NodeInfo(*nodeIt);
        
        status = info.SetInstanceName("User1");    
        check_error(status, "set user name");
        
        status = m_ni_context.CreateProductionTree(info);
        check_error(status, "create user production tree");
        
        status = info.GetInstance(m_ni_user_generator);
        check_error(status, "create user generator instance");   
    }
    
    status = m_ni_context.StartGeneratingAll();
    check_error(status, "StartGenerating");



    if (m_high_resolution)
    {
        XnMapOutputMode rgb_mode;
        rgb_mode.nFPS = 15;
        rgb_mode.nXRes = 1280;
        rgb_mode.nYRes = 1024;
        m_ni_rgb_generator.SetMapOutputMode(rgb_mode);
    }

    if (m_custom_bayer_decoding)
    {
        // Grayscale to get raw Bayer pattern.
        status = m_ni_rgb_generator.SetIntProperty ("InputFormat", 6);
        check_error(status, "Change input format");

        status = m_ni_rgb_generator.SetPixelFormat(XN_PIXEL_FORMAT_GRAYSCALE_8_BIT);
        check_error(status, "Change pixel format");
    }

    ntk_ensure(m_ni_depth_generator.IsCapabilitySupported(XN_CAPABILITY_ALTERNATIVE_VIEW_POINT), "Cannot register images.");
    m_ni_depth_generator.GetAlternativeViewPointCap().SetViewPoint(m_ni_rgb_generator);

    if (enable_skeleton_tracking) {
        ntk_throw_exception_if(!m_ni_user_generator.IsCapabilitySupported(XN_CAPABILITY_SKELETON),
                               "Supplied user generator doesn't support skeleton.");
        
        XnCallbackHandle hUserCallbacks, hCalibrationCallbacks, hPoseCallbacks;
        m_ni_user_generator.RegisterUserCallbacks(User_NewUser, User_LostUser, this, hUserCallbacks);
        m_ni_user_generator.GetSkeletonCap().RegisterCalibrationCallbacks(UserCalibration_CalibrationStart, UserCalibration_CalibrationEnd, this, hCalibrationCallbacks);
        
        if (m_ni_user_generator.GetSkeletonCap().NeedPoseForCalibration())
        {
            m_need_pose_to_calibrate = true;
            if (!m_ni_user_generator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION))
                ntk_throw_exception("Pose required, but not supported\n");
            m_ni_user_generator.GetPoseDetectionCap().RegisterToPoseCallbacks(UserPose_PoseDetected, NULL, this, hPoseCallbacks);
            m_ni_user_generator.GetSkeletonCap().GetCalibrationPose(m_calibration_pose);
        }
        
        m_ni_user_generator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);
        
        if (m_body_event_detector)
            m_body_event_detector->initialize(m_ni_context, m_ni_depth_generator);        
    }

    status = m_ni_context.StartGeneratingAll();
    check_error(status, "StartGenerating");

    m_ni_context.WaitAndUpdateAll();
    estimateCalibration();
}
Example #8
void RGBDCalibration :: loadFromFile(const char* filename)
{
    QFileInfo f (filename);
    ntk_throw_exception_if(!f.exists(), "Could not find calibration file.");
    cv::FileStorage calibration_file (filename, CV_STORAGE_READ);
    readMatrix(calibration_file, "rgb_intrinsics", rgb_intrinsics);
    readMatrix(calibration_file, "rgb_distortion", rgb_distortion);
    zero_rgb_distortion = rgb_distortion(0,0) < 1e-5 ? true : false;
    readMatrix(calibration_file, "depth_intrinsics", depth_intrinsics);
    readMatrix(calibration_file, "depth_distortion", depth_distortion);
    zero_depth_distortion = depth_distortion(0,0) < 1e-5 ? true : false;

    readMatrix(calibration_file, "R", R);
    readMatrix(calibration_file, "T", T);
    cv::Mat1i size_mat;
    readMatrix(calibration_file, "rgb_size", size_mat);
    rgb_size = cv::Size(size_mat(0,0), size_mat(0,1));
    readMatrix(calibration_file, "raw_rgb_size", size_mat);
    raw_rgb_size = cv::Size(size_mat(0,0), size_mat(0,1));
    readMatrix(calibration_file, "depth_size", size_mat);
    depth_size = cv::Size(size_mat(0,0), size_mat(0,1));
    readMatrix(calibration_file, "raw_depth_size", size_mat);
    raw_depth_size = cv::Size(size_mat(0,0), size_mat(0,1));

    try {
        readMatrix(calibration_file, "R_extrinsics", R_extrinsics);
        readMatrix(calibration_file, "T_extrinsics", T_extrinsics);
    }
    catch (...)
    {
        ntk_dbg(0) << "Warning: could not load extrinsics (R_extrinsics, T_extrinsics).";
    }

    try
    {
        cv::Mat1i size_mat;
        readMatrix(calibration_file, "infrared_size", size_mat);
        infrared_size = cv::Size(size_mat(0,0), size_mat(0,1));
    }
    catch (...)
    {
        ntk_dbg(1) << "Could not read infrared image size, setting default 1280x1024";
        infrared_size = cv::Size(1280, 1024);
    }

    try
    {
        readMatrix(calibration_file, "infrared_intrinsics", infrared_intrinsics);
        readMatrix(calibration_file, "infrared_distortion", infrared_distortion);
    } catch (...)
    {
        ntk_dbg(1) << "Warning: cannot read infrared camera intrinsics, computing from depth.";
        computeInfraredIntrinsicsFromDepth();
    }

    cv::Mat1f depth_calib (1,2);
    try {
        readMatrix(calibration_file, "depth_base_and_offset", depth_calib);
        depth_baseline = depth_calib(0,0);
        depth_offset = depth_calib(0,1);
    }
    catch(...)
    {
        ntk_dbg(1) << "Warning: could not load depth offset";
    }

    try {
        cv::Mat1f depth_multiplicative_correction_factor (1,1);
        readMatrix(calibration_file, "depth_multiplicative_correction_factor", depth_multiplicative_correction_factor);
        this->depth_multiplicative_correction_factor = depth_multiplicative_correction_factor(0,0);
    }
    catch(...)
    {
        ntk_dbg(1) << "Warning: could not load depth multiplicative factor";
    }

    try {
        cv::Mat1f depth_additive_correction_factor (1,1);
        readMatrix(calibration_file, "depth_additive_correction_factor", depth_additive_correction_factor);
        this->depth_additive_correction_factor = depth_additive_correction_factor(0,0);
    }
    catch(...)
    {
        ntk_dbg(1) << "Warning: could not load depth additive correction factor";
    }

    calibration_file.release();

    depth_pose = new Pose3D();
    rgb_pose = new Pose3D();
    updatePoses();
}
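A usage sketch for the RGBD calibration loader; as with the projector calibration, the members are assumed to be public, and the intrinsics are read back with cv::Mat::at so the sketch does not depend on their exact cv::Mat_ type:

RGBDCalibration calib;
calib.loadFromFile("kinect_calibration.yml");      // throws if the file is missing
double fx = calib.rgb_intrinsics.at<double>(0, 0); // color focal length (pixels)
double cx = calib.rgb_intrinsics.at<double>(0, 2); // principal point x
ntk_dbg(1) << "rgb fx=" << fx << " cx=" << cx;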
Example #9
void NiteRGBDGrabber :: initialize()
{
  const char* config_file = "config/NestkConfig.xml";

  xn::EnumerationErrors errors;
  XnStatus status = m_ni_context.InitFromXmlFile(config_file, &errors);
  if (status != XN_STATUS_OK)
  {
    ntk_dbg(0) << "[ERROR] " << xnGetStatusString(status);
    ntk_throw_exception("Could not initialize NITE. Check Log. "
                        "Most probable reasons are device not connected or no config/ "
                        "directory in the current directory.");
  }

  status = m_ni_context.FindExistingNode(XN_NODE_TYPE_DEPTH, m_ni_depth_generator);
  check_error(status, "Find depth generator");

  status = m_ni_context.FindExistingNode(XN_NODE_TYPE_USER, m_ni_user_generator);
  check_error(status, "Find user generator");

  status = m_ni_context.FindExistingNode(XN_NODE_TYPE_IMAGE, m_ni_rgb_generator);
  check_error(status, "Find image generator");

  if (m_high_resolution)
  {
    XnMapOutputMode rgb_mode;
    rgb_mode.nFPS = 15;
    rgb_mode.nXRes = 1280;
    rgb_mode.nYRes = 1024;
    m_ni_rgb_generator.SetMapOutputMode(rgb_mode);
  }

  if (m_custom_bayer_decoding)
  {
    // Grayscale to get raw Bayer pattern.
    status = m_ni_rgb_generator.SetIntProperty ("InputFormat", 6);
    check_error(status, "Change input format");

    status = m_ni_rgb_generator.SetPixelFormat(XN_PIXEL_FORMAT_GRAYSCALE_8_BIT);
    check_error(status, "Change pixel format");
  }

  ntk_ensure(m_ni_depth_generator.IsCapabilitySupported(XN_CAPABILITY_ALTERNATIVE_VIEW_POINT), "Cannot register images.");
  m_ni_depth_generator.GetAlternativeViewPointCap().SetViewPoint(m_ni_rgb_generator);

  ntk_throw_exception_if(!m_ni_user_generator.IsCapabilitySupported(XN_CAPABILITY_SKELETON),
                         "Supplied user generator doesn't support skeleton.");

  XnCallbackHandle hUserCallbacks, hCalibrationCallbacks, hPoseCallbacks;
  m_ni_user_generator.RegisterUserCallbacks(User_NewUser, User_LostUser, this, hUserCallbacks);
  m_ni_user_generator.GetSkeletonCap().RegisterCalibrationCallbacks(UserCalibration_CalibrationStart, UserCalibration_CalibrationEnd, this, hCalibrationCallbacks);

  if (m_ni_user_generator.GetSkeletonCap().NeedPoseForCalibration())
  {
    m_need_pose_to_calibrate = true;
    if (!m_ni_user_generator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION))
      ntk_throw_exception("Pose required, but not supported\n");
    m_ni_user_generator.GetPoseDetectionCap().RegisterToPoseCallbacks(UserPose_PoseDetected, NULL, this, hPoseCallbacks);
    m_ni_user_generator.GetSkeletonCap().GetCalibrationPose(m_calibration_pose);
  }

  m_ni_user_generator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);

  if (m_body_event_detector)
    m_body_event_detector->initialize(m_ni_context, m_ni_depth_generator);

  status = m_ni_context.StartGeneratingAll();
  check_error(status, "StartGenerating");

  m_ni_context.WaitAndUpdateAll();
  estimateCalibration();
}