InteractivePointCloud::InteractivePointCloud()
{
	// Clip rendering to a fixed 16 x 16 x 16 bounding box centered at the origin
	m_boundingBox = new BoundingBox<Vertex<float> >(
			Vertex<float>(-8, -8, -8),
			Vertex<float>(8, 8, 8)
			);

	// Start with an empty buffer until the first point cloud arrives
	updateBuffer(PointBufferPtr());
}
PointBufferPtr KinectIO::getBuffer()
{
	// Get the 640x480 depth image from the sensor
	std::vector<short> depthImage(480 * 640, 0);
	m_grabber->getDepthImage(depthImage);

	// Get the matching 640x480 RGB image
	std::vector<uint8_t> colorImage(480 * 640 * 3, 0);
	m_grabber->getColorImage(colorImage);

	// Collect the indices of invalid depth readings so they can be skipped below
	std::set<int> nans;
	for(size_t i = 0; i < depthImage.size(); i++)
	{
		if(isnan(depthImage[i])) nans.insert(i);
	}

	// Return null pointer if no image was grabbed
	if(depthImage.size() == 0) return PointBufferPtr();

	size_t numPoints = depthImage.size() - nans.size();

	// Convert depth image into point cloud
	PointBufferPtr buffer(new PointBuffer);
	floatArr points(new float[numPoints * 3]);
	ucharArr colors(new uchar[numPoints * 3]);

	int i, j;
	int index = 0; // write position in the output arrays (valid points only)
	int c = 0;     // running pixel index into the depth/color images
	for (i = 0; i < 480; i++) {
		for (j = 0; j < 640; j++) {

			if(nans.find(c) == nans.end())
			{
				// Back-project pixel (j, i) and its depth value into 3D
				// via the calibration matrix, in homogeneous coordinates
				Eigen::Vector4f v;
				v << j, i, (float)(depthImage[i * 640 + j]), 1.0f;
				v = m_depthMatrix.transpose() * v;

				points[3 * index    ] = v(0) / v(3);
				points[3 * index + 1] = v(1) / v(3);
				points[3 * index + 2] = v(2) / v(3);

				colors[3 * index    ] = colorImage[3 * c    ];
				colors[3 * index + 1] = colorImage[3 * c + 1];
				colors[3 * index + 2] = colorImage[3 * c + 2];
				index++;
			}
			c++;
		}
	}

	buffer->setPointArray(points, numPoints);
	buffer->setPointColorArray(colors, numPoints);
	return buffer;
}
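The inner loop above back-projects each pixel by multiplying a homogeneous image-space point with the (transposed) calibration matrix and then dividing by the homogeneous coordinate. The following stand-alone sketch isolates just that step; the identity calibration matrix and the pixel/depth values are placeholders, not taken from the example above.

#include <Eigen/Dense>
#include <iostream>

int main()
{
	Eigen::Matrix4f depthMatrix = Eigen::Matrix4f::Identity(); // placeholder calibration
	int   row   = 240, col = 320;  // placeholder pixel coordinates
	float depth = 1234.0f;         // placeholder raw depth value

	// Homogeneous image-space point (column, row, depth, 1)
	Eigen::Vector4f v(static_cast<float>(col), static_cast<float>(row), depth, 1.0f);
	v = depthMatrix.transpose() * v;

	// Perspective division yields the 3D coordinates stored in the point buffer
	float x = v(0) / v(3);
	float y = v(1) / v(3);
	float z = v(2) / v(3);
	std::cout << x << " " << y << " " << z << std::endl;
	return 0;
}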
Example #3
 /// Return a new point buffer with the same point context as this
 /// point buffer.
 PointBufferPtr makeNew() const
     { return PointBufferPtr(new PointBuffer(m_context)); }
 /// Construct a model from an optional point cloud and an optional mesh.
 Model( PointBufferPtr p = PointBufferPtr(), MeshBufferPtr m = MeshBufferPtr() )
 {
     m_pointCloud = p;
     m_mesh = m;
 }
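As a hypothetical usage sketch (not part of the original snippets), a buffer returned by KinectIO::getBuffer() could be handed to the Model constructor above; the variable names and the way the KinectIO object is obtained are assumptions here.

PointBufferPtr cloud = kinectIO.getBuffer(); // may be an empty pointer if no frame was grabbed
Model model(cloud);                          // the mesh member defaults to an empty MeshBufferPtr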