Code example #1
void OpenNIDevice::readDepthFrame(shared_ptr<DepthFrame> depthFrame)
{
    // Because the readFrame method is a blocking read, return immediately if
    // readDepthFrame is invoked after the last frame has already been read.
    if (m_device.isFile() && m_lastFrame >= this->getTotalFrames())
        return;

    if (m_oniDepthStream.readFrame(&m_oniDepthFrame) != openni::STATUS_OK) {
        throw 1;
    }

    if (!m_oniDepthFrame.isValid()) {
        throw 2;
    }

    m_lastFrame = dai::max<int>(m_lastFrame, m_oniDepthFrame.getFrameIndex());

    int strideDepth = m_oniDepthFrame.getStrideInBytes() / sizeof(openni::DepthPixel) - m_oniDepthFrame.getWidth();

    if (strideDepth > 0) {
        qWarning() << "WARNING: OpenNIDevice - Not managed depth stride!!!";
        throw 3;
    }

    depthFrame->setDataPtr(640, 480, (uint16_t*) m_oniDepthFrame.getData());
    depthFrame->setIndex(m_oniDepthFrame.getFrameIndex());
    depthFrame->setDistanceUnits(dai::DISTANCE_MILIMETERS);

    if (m_manual_registration) {
        depth2color(depthFrame);
    }
}
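
A hedged aside: the depth2color helper invoked above is not part of this listing. The sketch below shows one way a manual depth-to-color registration could work, assuming a pinhole model, equal resolutions, and no extrinsic offset between the two cameras; every parameter name here is hypothetical.

#include <cstdint>
#include <cstring>

// Sketch only: back-project each depth pixel to 3D with the depth intrinsics,
// then re-project it with the color intrinsics. Assumes depth in millimetres
// and ignores any rotation/translation between the two sensors.
void depth2color_sketch(const uint16_t* depth, uint16_t* registered,
                        int width, int height,
                        float fx_d, float fy_d, float cx_d, float cy_d,
                        float fx_c, float fy_c, float cx_c, float cy_c)
{
    std::memset(registered, 0, width * height * sizeof(uint16_t));

    for (int v = 0; v < height; ++v) {
        for (int u = 0; u < width; ++u) {
            uint16_t z = depth[v * width + u];
            if (z == 0)
                continue; // no depth measurement at this pixel

            // Pixel -> 3D point in the depth camera frame
            float x = (u - cx_d) * z / fx_d;
            float y = (v - cy_d) * z / fy_d;

            // 3D point -> pixel in the color camera frame
            int uc = static_cast<int>(x * fx_c / z + cx_c + 0.5f);
            int vc = static_cast<int>(y * fy_c / z + cy_c + 0.5f);

            if (uc >= 0 && uc < width && vc >= 0 && vc < height) {
                uint16_t& dst = registered[vc * width + uc];
                if (dst == 0 || z < dst)
                    dst = z; // keep the nearest surface on collisions
            }
        }
    }
}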
Code example #2
void Display(void)
{
	static ColorImage grayImage;
	memset(grayImage.data, 0x80, sizeof(grayImage.data));
	grayImage.rows = grayImage.maxRows;
	grayImage.cols = grayImage.maxCols;

	// Get data maps from Kinect sensor
	static KinectData data;
	static KinectData remoteData;
	bool dataChanged = false;
	try
	{
		dataChanged = client->GetData(data);
	}
	catch(...)
	{
		fprintf(stderr, "Error: Could not get data!\n");
		return;
	}
	
	if (!dataChanged)
		return;

	// Get a visual representation of the depth image
	static ColorImage visualDepthImage;
	if (showDebug)
		depth2color(data.depthImage, visualDepthImage);

	// Apply effects (new version)
	viz.updateData(data);
	viz.applyEffects();

	// Get test image
	static ColorImage testImage;
	if (showDebug)
		CreateGraphicalImage(data, testImage);

	// Set the images for viewing
	View::SetTopLeftImage(viz.getImage());
	View::SetTopRightImage(visualDepthImage);
	View::SetBottomLeftImage(testImage);
	
	return;
}
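
In this snippet, depth2color renders the depth map as a viewable image rather than registering it to the color camera. Below is a minimal sketch of such a visualizer, assuming row-major 16-bit depth in millimetres and a packed 8-bit RGB output; the real DepthImage/ColorImage layouts are not shown in this listing.

#include <algorithm>
#include <cstdint>

// Sketch only: linearly map depth in [minZ, maxZ] millimetres to gray levels,
// near = bright, far = dark, invalid (0) = black.
void visualizeDepth(const uint16_t* depth, uint8_t* rgb, int width, int height,
                    uint16_t minZ = 500, uint16_t maxZ = 4000)
{
    for (int i = 0; i < width * height; ++i) {
        const uint16_t z = depth[i];
        uint8_t v = 0;
        if (z != 0) {
            const uint16_t c = std::min(std::max(z, minZ), maxZ);
            v = static_cast<uint8_t>(255u - 255u * (c - minZ) / (maxZ - minZ));
        }
        rgb[3 * i + 0] = v;
        rgb[3 * i + 1] = v;
        rgb[3 * i + 2] = v;
    }
}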
Code example #3
void IASLAB_RGBD_ID_Instance::nextFrame(QHashDataFrames &output)
{
    // Read Color File
    QString instancePath = m_info.parent().getPath() + "/" + m_info.getFileName(DataFrame::Color);
    cv::Mat color_mat = cv::imread(instancePath.toStdString());
    cv::cvtColor(color_mat, color_mat, CV_BGR2RGB);
    ColorFrame srcColor(color_mat.cols, color_mat.rows, (RGBColor*) color_mat.data);
    shared_ptr<ColorFrame> dstColor = make_shared<ColorFrame>();
    *dstColor = srcColor; // Copy
    output.insert(DataFrame::Color, dstColor);

    // Read Depth File (raw binary; the 16 bytes skipped below are presumably a fixed-size header)
    instancePath = m_info.parent().getPath() + "/" + m_info.getFileName(DataFrame::Depth);
    QFile depthFile(instancePath);
    depthFile.open(QIODevice::ReadOnly);
    QByteArray buffer = depthFile.readAll();
    depthFile.close();
    shared_ptr<DepthFrame> depthFrame_tmp = make_shared<DepthFrame>(640, 480, (uint16_t*) (buffer.data() + 16));
    shared_ptr<DepthFrame> depthFrame = shared_ptr<DepthFrame>(new DepthFrame(*depthFrame_tmp)); // Clone!
    depthFrame->setDistanceUnits(dai::DISTANCE_MILIMETERS);
    // Set Depth intrinsics of the camera that generated this frame
    depthFrame->setCameraIntrinsics(fx_d, cx_d, fy_d, cy_d);
    output.insert(DataFrame::Depth, depthFrame);

    // Read Mask File (raw binary; the 14 bytes skipped below are presumably a fixed-size header)
    instancePath = m_info.parent().getPath() + "/" + m_info.getFileName(DataFrame::Mask);
    QFile maskFile(instancePath);
    maskFile.open(QIODevice::ReadOnly);
    buffer = maskFile.readAll();
    maskFile.close();
    shared_ptr<MaskFrame> maskFrame_tmp = make_shared<MaskFrame>(640, 480, (uint8_t*) (buffer.data() + 14));
    shared_ptr<MaskFrame> maskFrame = shared_ptr<MaskFrame>(new MaskFrame(*maskFrame_tmp)); // Clone!
    output.insert(DataFrame::Mask, maskFrame);

    // Read Skeleton txt file (line by line)
    /*For every frame, a skeleton file is available. For every joint, a row with the following information is written to the skeleton file:
    [id]: person ID,
    [x3D], [y3D], [z3D]: joint 3D position,
    [x2D], [y2D]: joint 2D position,
    [TrackingState]: 0 (not tracked), 1 (inferred) or 2 (tracked),
    [QualityFlag]: not implemented by NiTE, thus it is always 0,
    [OrientationStartJoint], [OrientationEndJoint]: indices of the extreme joints of a link,
    [Qx], [Qy], [Qz], [Qw]: quaternion expressing the orientation of the link identified by [OrientationStartJoint] and [OrientationEndJoint].
    NiTE provides 15 joints, but these are remapped to the 20 joints that Microsoft Kinect SDK estimates, in order to have the same format (the joint positions missing in NiTE (wrists, ankles) are copied from other joints (hands, feet)).
    For more information, please visit http://msdn.microsoft.com/en-us/library/hh973074.aspx.
    You can also visualize the data with the "visualize_IASLab_RGBDID_dataset.m" Matlab script we provided.*/
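    // Hedged note: the documented row format appears to map to the 0-based
    // columns[] indices used below as (an inference, not stated in the source):
    //   [0] id | [1..3] x3D y3D z3D | [4..5] x2D y2D | [6] TrackingState
    //   [7] QualityFlag | [8] OrientationStartJoint | [9] OrientationEndJoint
    //   [10..13] Qx Qy Qz Qw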
    instancePath = m_info.parent().getPath() + "/" + m_info.getFileName(DataFrame::Skeleton);
    QFile skeletonFile(instancePath);
    skeletonFile.open(QIODevice::ReadOnly);
    QTextStream in(&skeletonFile);
    shared_ptr<Skeleton> skeleton = make_shared<Skeleton>();

    QList<int> ignore_lines = {1, 7, 11, 15, 19};
    int lineCount = 0;

    while (!in.atEnd())
    {
        QStringList columns = in.readLine().split(",");

        if (ignore_lines.contains(lineCount)) {
            lineCount++;
            continue;
        }

        // Positions are scaled to millimetres; the Y axis is flipped
        SkeletonJoint joint(Point3f(columns[1].toFloat() * 1000.0f,
                                    -columns[2].toFloat() * 1000.0f,
                                    columns[3].toFloat() * 1000.0f),
                            _staticMap[lineCount]);
        int trackingState = columns[6].toInt();

        if (trackingState == 0)
            joint.setPositionConfidence(0.0f);

        joint.setOrientation(Quaternion(columns[13].toFloat(),
                             columns[10].toFloat(), columns[11].toFloat(), columns[12].toFloat()));

        //joint.setOrientationConfidence(niteJoint.getOrientationConfiden
        skeleton->setJoint(_staticMap[lineCount], joint);
        lineCount++;
    }

    skeletonFile.close();

    // Set intrinsics of the camera that generated this frame (depth camera in this case)
    skeleton->setCameraIntrinsics(fx_d, cx_d, fy_d, cy_d);
    shared_ptr<SkeletonFrame> skeletonFrame = make_shared<SkeletonFrame>();
    skeletonFrame->setSkeleton(1, skeleton);
    output.insert(DataFrame::Skeleton, skeletonFrame);

    // Register depth to color
    depth2color(depthFrame, maskFrame, skeleton);
}
Code example #4
void OpenNIDevice::readUserTrackerFrame(shared_ptr<DepthFrame> depthFrame, shared_ptr<MaskFrame> maskFrame,
                                        shared_ptr<SkeletonFrame> skeletonFrame, shared_ptr<MetadataFrame> metadataFrame)
{
    nite::UserTrackerFrameRef oniUserTrackerFrame;

    // Because the readFrame method is a blocking read, return immediately if
    // readUserTrackerFrame is invoked after the last frame has already been read.
    if (m_device.isFile() && m_lastFrame >= this->getTotalFrames())
        return;

    if (m_oniUserTracker.readFrame(&oniUserTrackerFrame) != nite::STATUS_OK) {
        throw 1;
    }

    if (!oniUserTrackerFrame.isValid()) {
        throw 2;
    }

    m_lastFrame = dai::max<int>(m_lastFrame, oniUserTrackerFrame.getFrameIndex());

    // Depth Frame
    if (depthFrame) {
        m_oniDepthFrame = oniUserTrackerFrame.getDepthFrame();

        int strideDepth = m_oniDepthFrame.getStrideInBytes() / sizeof(openni::DepthPixel) - m_oniDepthFrame.getWidth();

        if (strideDepth > 0) {
            qWarning() << "WARNING: OpenNIDevice - Not managed depth stride!!!";
            throw 3;
        }

        depthFrame->setDataPtr(640, 480, (uint16_t*) m_oniDepthFrame.getData());
        depthFrame->setIndex(m_oniDepthFrame.getFrameIndex());
        depthFrame->setDistanceUnits(dai::DISTANCE_MILIMETERS);
    }

    // Load User Labels (copy)
    if (maskFrame) {
        const nite::UserMap& userMap = oniUserTrackerFrame.getUserMap();

        int strideUser = userMap.getStride() / sizeof(nite::UserId) - userMap.getWidth();

        if (strideUser > 0) {
            qWarning() << "WARNING: OpenNIRuntime - Not managed user stride!!!";
            throw 1;
        }

        const nite::UserId* pLabel = userMap.getPixels();

        for (int i=0; i < userMap.getHeight(); ++i) {
            uint8_t* pMask = maskFrame->getRowPtr(i);
            for (int j=0; j < userMap.getWidth(); ++j) {
                pMask[j] = *pLabel;
                pLabel++;
            }
        }

        maskFrame->setIndex(oniUserTrackerFrame.getFrameIndex());
    }

    // Registration
    if (m_manual_registration) {
        depth2color(depthFrame, maskFrame);
    }

    // Process Users
    if (skeletonFrame) {
        skeletonFrame->clear();
        skeletonFrame->setIndex(oniUserTrackerFrame.getFrameIndex());
    }

    if (metadataFrame) {
        metadataFrame->boundingBoxes().clear();
        metadataFrame->setIndex(oniUserTrackerFrame.getFrameIndex());
    }

    const nite::Array<nite::UserData>& users = oniUserTrackerFrame.getUsers();

    for (int i=0; i<users.getSize(); ++i)
    {
        const nite::UserData& user = users[i];

        if (user.isNew()) {
            m_oniUserTracker.startSkeletonTracking(user.getId());
        }
        else if (user.isVisible())
        {
            // Get Boundingbox
            if (metadataFrame) {
                const nite::BoundingBox niteBoundingBox = user.getBoundingBox();
                const NitePoint3f bbMin = niteBoundingBox.min;
                const NitePoint3f bbMax = niteBoundingBox.max;
                dai::BoundingBox boundingBox(dai::Point3f(bbMin.x, bbMin.y, bbMin.z),
                                             dai::Point3f(bbMax.x, bbMax.y, bbMax.z));
                metadataFrame->boundingBoxes().append(boundingBox);
            }

            // Get Skeleton
            if (skeletonFrame) {
                const nite::Skeleton& oniSkeleton = user.getSkeleton();
                const nite::SkeletonJoint& head = user.getSkeleton().getJoint(nite::JOINT_HEAD);

                if (oniSkeleton.getState() == nite::SKELETON_TRACKED && head.getPositionConfidence() > 0.5)
                {
                    auto daiSkeleton = skeletonFrame->getSkeleton(user.getId());

                    if (daiSkeleton == nullptr) {
                        daiSkeleton = make_shared<dai::Skeleton>(dai::Skeleton::SKELETON_OPENNI);
                        daiSkeleton->setDistanceUnits(dai::DISTANCE_MILIMETERS);
                        skeletonFrame->setSkeleton(user.getId(), daiSkeleton);
                    }

                    OpenNIDevice::copySkeleton(oniSkeleton, *(daiSkeleton.get()));
                    //daiSkeleton->computeQuaternions();
                }
            }
        }
        else if (user.isLost()) {
            m_oniUserTracker.stopSkeletonTracking(user.getId());
        }
    } // End for
}
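
OpenNIDevice::copySkeleton is called above but not shown. Below is a minimal sketch of what it might do; the dai joint-type enum names are hypothetical, and the Quaternion(w, x, y, z) argument order is inferred from code example #3.

#include <NiTE.h>

// Sketch only: copy position, confidence and orientation for each paired
// joint from the NiTE skeleton into the dai skeleton.
void copySkeletonSketch(const nite::Skeleton& src, dai::Skeleton& dst)
{
    // Hypothetical NiTE-to-dai joint pairing; the remaining NiTE joints
    // would be mapped the same way.
    static const struct {
        nite::JointType niteType;
        dai::SkeletonJoint::JointType daiType; // hypothetical enum name
    } pairs[] = {
        { nite::JOINT_HEAD, dai::SkeletonJoint::JOINT_HEAD },
        { nite::JOINT_NECK, dai::SkeletonJoint::JOINT_NECK },
    };

    for (const auto& p : pairs) {
        const nite::SkeletonJoint& nj = src.getJoint(p.niteType);
        const nite::Point3f& pos = nj.getPosition();     // millimetres in NiTE
        const nite::Quaternion& q = nj.getOrientation();

        dai::SkeletonJoint joint(dai::Point3f(pos.x, pos.y, pos.z), p.daiType);
        joint.setPositionConfidence(nj.getPositionConfidence());
        joint.setOrientation(dai::Quaternion(q.w, q.x, q.y, q.z));
        dst.setJoint(p.daiType, joint);
    }
}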