Example #1
Face::FaceData::VectorOfPoints Kinect::depthToVectorOfPoints(double *depth)
{
    Face::FaceData::VectorOfPoints result;

    int i = 0;
    for (int r = 0; r < 480; r++)
    {
        for (int c = 0; c < 640; c++)
        {
            // skip pixels with no depth reading
            if (depth[i] != 0)
            {
                double z = depth[i];
                // depth-dependent scale applied to the pixel's offset from the image centre (320, 240)
                double coef = (z + mindistance) * scaleFactor;
                double y = -(r - 240) * coef;
                double x = -(320 - c) * coef;

                // flip the z axis
                z = -z;

                cv::Point3d p(x, y, z);
                result.push_back(p);
            }

            i++;
        }
    }

    return result;
}
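Example #1 turns each non-zero depth reading into a 3D point by scaling the pixel's offset from the image centre (320, 240) with a depth-dependent factor and flipping the z axis. Below is a minimal sketch of that mapping for a single pixel; kMinDistance and kScaleFactor are hypothetical stand-ins for the class members mindistance and scaleFactor, whose real values are not shown here.

#include <opencv2/core.hpp>

// Illustrative calibration constants; the real values are the Kinect class
// members mindistance and scaleFactor used above.
static const double kMinDistance = -10.0;
static const double kScaleFactor = 0.0021;

// Back-project a single depth pixel (row r, column c, raw depth z) the same
// way the loop above does.
cv::Point3d depthPixelToPoint(int r, int c, double z)
{
    double coef = (z + kMinDistance) * kScaleFactor;
    double y = -(r - 240) * coef;   // offset from the vertical image centre
    double x = -(320 - c) * coef;   // offset from the horizontal image centre
    return cv::Point3d(x, y, -z);   // flip z, as in depthToVectorOfPoints
}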
Example #2
Face::FaceData::Mesh *Kinect::createMesh(double *depth, uchar *rgb)
{
    // maps each depth-pixel index to the index of the point it produced (n = 640 * 480 depth pixels)
    std::vector<int> DepthIndextoPointIndex(n);

    Face::FaceData::VectorOfPoints points;
    Face::FaceData::Mesh::Colors colors;
    int i = 0;
    for (int r = 0; r < 480; r++)
    {
        for (int c = 0; c < 640; c++)
        {
            // skip pixels with no depth reading
            if (depth[i] != 0)
            {
                double z = -depth[i];
                double coef = (depth[i] + mindistance) * scaleFactor;
                double y = -(r - 240) * coef;
                double x = -(320 - c) * coef;

                // 3d data
                cv::Point3d p(x, y, z);
                points.push_back(p);

                // remember which point this depth pixel produced
                DepthIndextoPointIndex[i] = points.size() - 1;

                // texture: read the interleaved RGB triplet and store it as (b, g, r)
                int i3 = 3 * i;
                unsigned char red = rgb[i3];
                unsigned char green = rgb[i3 + 1];
                unsigned char blue = rgb[i3 + 2];
                Face::FaceData::Mesh::Color color(blue, green, red);
                colors.push_back(color);
            }

            i++;
        }
    }

    // copy the collected points into the mesh's point matrix
    Face::FaceData::Mesh *result = new Face::FaceData::Mesh();
    int pCount = points.size();
    result->pointsMat = Matrix(pCount, 3);
    for (int r = 0; r < pCount; r++)
    {
        result->pointsMat(r, 0) = points[r].x;
        result->pointsMat(r, 1) = points[r].y;
        result->pointsMat(r, 2) = points[r].z;
    }
    result->colors = colors;
    result->recalculateMinMax();

    // connect each valid depth pixel with its lower, right and lower-right
    // neighbours, producing up to two triangles per 2x2 block
    for (int i = 0; i < n; i++)
    {
        int x = i % 640;
        if (x == 639) continue;

        int y = i / 640;
        if (y == 479) continue;

        double val = depth[i];
        if (val == 0) continue;

        double down = depth[i + 640];
        double right = depth[i + 1];
        double downRight = depth[i + 641];

        if (down != 0 && downRight != 0)
            result->triangles.push_back(cv::Vec3i(DepthIndextoPointIndex[i],
                                                  DepthIndextoPointIndex[i + 640],
                                                  DepthIndextoPointIndex[i + 641]));
        if (right != 0 && downRight != 0)
            result->triangles.push_back(cv::Vec3i(DepthIndextoPointIndex[i],
                                                  DepthIndextoPointIndex[i + 641],
                                                  DepthIndextoPointIndex[i + 1]));
    }

    return result;
}
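The second half of Example #2 triangulates the 640x480 depth grid: every valid pixel is connected to its lower, right, and lower-right neighbours, yielding up to two triangles per 2x2 block. The standalone sketch below restates that rule; it assumes a hypothetical index map that stores -1 for pixels that produced no point, whereas the code above only fills entries for valid pixels.

#include <vector>
#include <opencv2/core.hpp>

// Sketch of the grid triangulation rule from createMesh(). map[y * width + x]
// holds the point index of that pixel, or -1 if the pixel had no depth.
std::vector<cv::Vec3i> triangulateGrid(const std::vector<int> &map, int width, int height)
{
    std::vector<cv::Vec3i> triangles;
    for (int y = 0; y < height - 1; y++)
    {
        for (int x = 0; x < width - 1; x++)
        {
            int i = y * width + x;
            if (map[i] < 0) continue;

            int down = i + width;
            int right = i + 1;
            int downRight = i + width + 1;

            // upper-left triangle of the 2x2 block
            if (map[down] >= 0 && map[downRight] >= 0)
                triangles.push_back(cv::Vec3i(map[i], map[down], map[downRight]));

            // lower-right triangle of the 2x2 block
            if (map[right] >= 0 && map[downRight] >= 0)
                triangles.push_back(cv::Vec3i(map[i], map[downRight], map[right]));
        }
    }
    return triangles;
}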
Example #3
void OccipitalSensor::scan()
{
    // live preview until the user presses a key
    while (cv::waitKey(30) < 0)
    {
        cv::imshow("ir", irData / 255.0);
        cv::imshow("depth", depthData / 30000.0);
    }

    // accumulate several depth frames and count valid hits per pixel
    int frames = 20;
    Matrix cumulativeDepth = Matrix::zeros(Height, Width);
    Matrix hitCounter = Matrix::zeros(Height, Width);
    for (int i = 0; i < frames; i++)
    {
        for (int r = 0; r < Height; r++)
        {
            for (int c = 0; c < Width; c++)
            {
                // keep only readings inside the usable depth range
                if (depthData(r, c) > 5000 && depthData(r, c) < 15000)
                {
                    cumulativeDepth(r, c) += depthData(r, c);
                    hitCounter(r, c)++;
                }
            }
        }
        cv::imshow("ir", irData/255.0);
        cv::imshow("depth", cumulativeDepth/30000.0/(i+1));
        cv::waitKey(30);
    }

    // average the accumulated depth per pixel
    for (int r = 0; r < Height; r++)
    {
        for (int c = 0; c < Width; c++)
        {
            if (hitCounter(r, c) == 0) continue;
            cumulativeDepth(r, c) = cumulativeDepth(r, c) / hitCounter(r, c);
        }
    }

    // build the point cloud: back-project averaged depth and grey-shade each point from the IR frame
    Face::FaceData::VectorOfPoints pointcloud;
    Face::FaceData::Mesh::Colors colors;
    for (int r = 0; r < Height; r++)
    {
        for (int c = 0; c < Width; c++)
        {
            if (cumulativeDepth(r, c) == 0) continue;

            // convert pixel + depth to OpenNI world coordinates, then scale down and flip z
            float x, y, z;
            openni::CoordinateConverter::convertDepthToWorld(depthStream, c, r, cumulativeDepth(r, c), &x, &y, &z);
            pointcloud.push_back(cv::Point3d(x / 10, y / 10, -z / 10));

            uchar intensity = irData(r, c);
            colors.push_back(Face::FaceData::Mesh::Color(intensity, intensity, intensity));
        }
    }
    m = Face::FaceData::Mesh::fromPointcloud(pointcloud, true, true);
    m.colors = colors;

    depthStream.start();

    cv::destroyAllWindows();
}
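OccipitalSensor::scan() reduces noise by summing 20 depth frames per pixel, counting how many readings fell inside a plausible range, and dividing the sum by the hit count afterwards. A self-contained sketch of that averaging step, written against plain cv::Mat_<double> frames instead of the sensor's Matrix type and with illustrative range limits, might look like this:

#include <vector>
#include <opencv2/core.hpp>

// Average several depth frames per pixel, ignoring readings outside
// [minDepth, maxDepth]; the thresholds here are illustrative.
cv::Mat_<double> averageDepthFrames(const std::vector<cv::Mat_<double>> &frames,
                                    double minDepth = 5000, double maxDepth = 15000)
{
    CV_Assert(!frames.empty());
    cv::Mat_<double> sum = cv::Mat_<double>::zeros(frames[0].size());
    cv::Mat_<double> hits = cv::Mat_<double>::zeros(frames[0].size());

    for (const auto &frame : frames)
    {
        for (int r = 0; r < frame.rows; r++)
        {
            for (int c = 0; c < frame.cols; c++)
            {
                double d = frame(r, c);
                if (d > minDepth && d < maxDepth)   // keep readings in the usable range
                {
                    sum(r, c) += d;
                    hits(r, c) += 1;
                }
            }
        }
    }

    // divide by the hit count; pixels that never passed the range check stay at zero
    for (int r = 0; r < sum.rows; r++)
        for (int c = 0; c < sum.cols; c++)
            if (hits(r, c) > 0)
                sum(r, c) /= hits(r, c);

    return sum;
}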
void RealSenseSensorImpl::doCapturingDone()
{
	std::cout << "Capturing done" << std::endl;

	int count = 0;
	Face::FaceData::VectorOfPoints points;
	Face::FaceData::Mesh::Colors colors;
	Face::FaceData::Mesh::UVMap uvMap;

	int averagedCount = 0;
	Face::FaceData::VectorOfPoints averagedPoints;
	Face::FaceData::Mesh::Colors averagedColors;
	Face::FaceData::Mesh::UVMap averagedUvMap;
	
	auto colorRoi = toRealSizeRoi(tracker->getLastRegion());
	int depthCount = 0;
	// keep only points within captureDepth of the nearest detected z value
	double zThreshold = nearestZpoint + settings.captureDepth;
	for (int r = 0; r < 480; r++) {
		for (int c = 0; c < 640; c++) {
			int i = r * 640 + c;

			// skip points too far from the nearest point
			if (depthMatrix(r, c) > zThreshold) continue;

			// skip invalid points
			if (vertices[i].x != 0 && vertices[i].y != 0 && vertices[i].z != 0) {
				const PXCPointF32 &uv = uvmap[i];
				int colorCol = uv.x * 640;
				int colorRow = uv.y * 480;

				// skip points outside ROI
				if (!cv::Point2i(colorCol, colorRow).inside(colorRoi)) {
					continue;
				}

				count++;

				cv::Point3d p(-vertices[i].x, vertices[i].y, -vertices[i].z);
				points.push_back(p);

				uvMap.push_back(cv::Vec2d(uv.x, uv.y));

				if (colorCol >= 640 || colorCol < 0 || colorRow >= 480 || colorRow < 0)
					colors.push_back(cv::Vec3b(0, 0, 0));
				else
					colors.push_back(colorPreview(colorRow, colorCol));
			}

			// skip invalid points and points beyond the depth threshold
			if (averagedVertices[i].x != 0 && averagedVertices[i].y != 0 && averagedVertices[i].z != 0 && averagedVertices[i].z <= zThreshold) {
				const PXCPointF32 &uv = averagedUvmap[i];
				int colorCol = uv.x * 640;
				int colorRow = uv.y * 480;

				// skip points outside ROI
				if (!cv::Point2i(colorCol, colorRow).inside(colorRoi)) {
					continue;
				}

				averagedCount++;

				cv::Point3d p(-averagedVertices[i].x, averagedVertices[i].y, -averagedVertices[i].z);
				averagedPoints.push_back(p);

				averagedUvMap.push_back(cv::Vec2d(uv.x, uv.y));

				if (colorCol >= 640 || colorCol < 0 || colorRow >= 480 || colorRow < 0)
					averagedColors.push_back(cv::Vec3b(0, 0, 0));
				else
					averagedColors.push_back(colorPreview(colorRow, colorCol));
			}

			depthCount++;
		}
	}

	std::cout << "Depth points: " << depthCount << std::endl;
	std::cout << "Added " << count << " points" << std::endl;
	std::cout << "Added " << averagedCount << " averaged points" << std::endl;

	_rawMesh = Face::FaceData::Mesh::fromPointcloud(points, false, true);
	_rawMesh.colors = colors;
	_rawMesh.uvmap = uvMap;
	cv::Point3d shift = _rawMesh.centralize();
	_rawLandmarks.translate(shift);
	
	_processedMesh = Face::FaceData::Mesh::fromPointcloud(averagedPoints, false, true);
	_processedMesh.colors = averagedColors;
	_processedMesh.uvmap = averagedUvMap;
	shift = _processedMesh.centralize();
	_processedLandmarks.translate(shift);

	setState(RealSenseSensor::STATE_OFF);
}
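Both branches of doCapturingDone() use the same color lookup: the normalized UV coordinates from the depth-to-color map are scaled to the 640x480 color image and sampled, with black substituted when they fall outside the frame. A minimal sketch of that lookup, assuming colorPreview is a cv::Mat_<cv::Vec3b>, is shown below.

#include <opencv2/core.hpp>

// Sample the color image at normalized UV coordinates, as in doCapturingDone().
// colorPreview is assumed to be a cv::Mat_<cv::Vec3b> (BGR pixels).
cv::Vec3b sampleColorAtUv(const cv::Mat_<cv::Vec3b> &colorPreview, float u, float v)
{
    int col = static_cast<int>(u * colorPreview.cols);
    int row = static_cast<int>(v * colorPreview.rows);

    // fall back to black when the UV coordinates land outside the color frame
    if (col < 0 || col >= colorPreview.cols || row < 0 || row >= colorPreview.rows)
        return cv::Vec3b(0, 0, 0);

    return colorPreview(row, col);
}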