Example #1
void Inspector::updateDepth(const openni_wrapper::Image& image,
                            const openni_wrapper::DepthImage& depth)
{
  frame_.depth_->setZero();
  // Copy the raw 16-bit depth values into a temporary buffer.
  std::vector<unsigned short> data(depth.getHeight() * depth.getWidth());
  depth.fillDepthImageRaw(depth.getWidth(), depth.getHeight(), data.data());
  // Write valid measurements into the depth matrix, skipping pixels the
  // sensor marked as "no sample" or shadowed.
  int i = 0;
  for(size_t y = 0; y < depth.getHeight(); ++y) {
    for(size_t x = 0; x < depth.getWidth(); ++x, ++i) {
      if(data[i] == depth.getNoSampleValue() || data[i] == depth.getShadowValue())
        continue;
      frame_.depth_->coeffRef(y, x) = data[i];
    }
  }

  // Apply the depth distortion model, if one is available and intrinsics are enabled.
  if(dddm_ && use_intrinsics_)
    dddm_->undistort(frame_.depth_.get());

  // Convert the OpenNI color image to OpenCV format.
  frame_.img_ = oniToCV(image);
    
  // Project the frame to a point cloud using nominal Kinect VGA intrinsics
  // (640x480, principal point at the image centre, focal length 525 px).
  Cloud::Ptr cloud(new Cloud);
  FrameProjector proj;
  proj.width_ = 640;
  proj.height_ = 480;
  proj.cx_ = proj.width_ / 2;
  proj.cy_ = proj.height_ / 2;
  proj.fx_ = 525;
  proj.fy_ = 525;
  proj.frameToCloud(frame_, cloud.get());
  pcd_vis_.updatePointCloud(cloud, "cloud");
  pcd_vis_.spinOnce(5);
}
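For reference, the FrameProjector above is configured with nominal Kinect VGA intrinsics (fx = fy = 525, principal point at the image centre). Below is a minimal sketch of the pinhole back-projection such a projector typically performs, assuming raw depth is stored in millimetres; Point3f and backProject are illustrative names, not part of the example's API.

struct Point3f { float x, y, z; };

// Back-project pixel (u, v) with raw depth in millimetres to a 3D point in metres.
Point3f backProject(int u, int v, unsigned short depth_mm,
                    float fx, float fy, float cx, float cy)
{
  Point3f p;
  p.z = depth_mm * 0.001f;      // millimetres -> metres
  p.x = (u - cx) * p.z / fx;    // pinhole camera model
  p.y = (v - cy) * p.z / fy;
  return p;
}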
Example #2
void
DataCapture::saveDepthmap (const std::string &filename, const openni_wrapper::DepthImage &depthmap, unsigned char depth_res) const
{
  // get meta data associated with the depth image
  xn::DepthMetaData& md = const_cast<xn::DepthMetaData&> (depthmap.getDepthMetaData());

/*
  cout << "depth image data:" << endl;
  cout << " - focal length = " << depthmap.getFocalLength() << endl;
  cout << " - base line    = " << depthmap.getBaseline() << endl;
  cout << " - depth res.   = " << md.ZRes() << endl;
*/

  // copy depth values into a buffer and wrap it (without copying) in a cv::Mat
  int width = depthmap.getWidth(), height = depthmap.getHeight();
  std::vector<unsigned short> buffer (width * height);
  depthmap.fillDepthImageRaw (width, height, buffer.data());
  cv::Mat_<unsigned short> mat16bit (height, width, buffer.data());

  // scale according to the depth resolution provided by the sensor
  double scale = (std::pow(2.0, depth_res) - 1.0) / md.ZRes();
  std::cout << "Max resolution of depth sensor is: " << md.ZRes() << std::endl;

  switch (depth_res)
  {
    case 8: {
      // convert from 16 to 8 bit per depth value
      cv::Mat mat8bit (height, width, CV_8U);
      mat16bit.convertTo ( mat8bit, CV_8U, scale);
      cv::imwrite (filename + "_depth.png", mat8bit);
      break;
    }
    case 16: {
      //mat16bit *= scale;
      cv::imwrite (filename + "_depth.png", mat16bit);
      break;
    }
    default: {
      std::cerr << "[saveDepthmap] Depth size of " << static_cast<int>(depth_res) << " not supported (use 8 or 16)" << std::endl;
      break;
    }
  }

  // std::cout << "Depth image saved as PNG (" << (int)depth_res << ") to " << filename << "_depth.png" << std::endl;
}
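To make the scaling in case 8 concrete, here is a small self-contained sketch, assuming a sensor whose md.ZRes() reports 10000 (a typical value for OpenNI depth cameras); the numbers are illustrative, not taken from the example.

#include <cmath>
#include <cstdio>

int main()
{
  const double z_res = 10000.0;                            // assumed sensor depth range (mm)
  const double scale = (std::pow(2.0, 8) - 1.0) / z_res;   // ~0.0255, maps [0, z_res] to [0, 255]
  const unsigned short raw = 2000;                         // a 2 m reading in millimetres
  std::printf("scale = %.4f, 8-bit value = %d\n",
              scale, static_cast<int>(raw * scale + 0.5)); // prints roughly 51
  return 0;
}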
Example #3
void OpenNINode::publishDepthImage(const openni_wrapper::DepthImage& depth, image_transport::CameraPublisher depth_pub) const {
  // Fill a 32-bit float depth image message (one float per pixel, depth in metres).
  sensor_msgs::ImagePtr depth_msg = boost::make_shared<sensor_msgs::Image>();
  ros::Time time = ros::Time::now();
  depth_msg->header.stamp    = time;
  depth_msg->encoding        = sensor_msgs::image_encodings::TYPE_32FC1;
  depth_msg->width           = depth_width_;
  depth_msg->height          = depth_height_;
  depth_msg->step            = depth_msg->width * sizeof(float);
  depth_msg->data.resize(depth_msg->height * depth_msg->step);

  // The OpenNI wrapper converts the raw depth map to metres directly into the message buffer.
  depth.fillDepthImage(depth_msg->width, depth_msg->height, reinterpret_cast<float*>(&depth_msg->data[0]), depth_msg->step);

  // Publish the image together with its CameraInfo.
  depth_pub.publish(depth_msg, getDepthCameraInfo(depth_msg->width, depth_msg->height, time, 1));
  //pub.publish(depth_msg);
}
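For context, a minimal sketch of how the image_transport::CameraPublisher passed into this function is typically created; the node name, topic, and queue size are assumptions, not taken from the example.

#include <ros/ros.h>
#include <image_transport/image_transport.h>

int main(int argc, char** argv)
{
  ros::init(argc, argv, "openni_depth_publisher");  // hypothetical node name
  ros::NodeHandle nh;
  image_transport::ImageTransport it(nh);
  // advertiseCamera publishes the image and its CameraInfo on paired topics.
  image_transport::CameraPublisher depth_pub = it.advertiseCamera("depth/image", 1);
  // depth_pub would then be handed to something like OpenNINode::publishDepthImage.
  ros::spin();
  return 0;
}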