Example #1
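Sensor-server handler: reads a get-string-property request, validates its size, fetches the value, and sends it back in the reply.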
XnStatus XnServerSession::HandleGetStringProperty()
{
	XnStatus nRetVal = XN_STATUS_OK;

	// read it
	XnSensorServerMessageGetPropertyRequest request;
	XnUInt32 nDataSize = sizeof(request);
	nRetVal = m_privateIncomingPacker.ReadCustomData(XN_SENSOR_SERVER_MESSAGE_GET_STRING_PROPERTY, &request, &nDataSize);
	XN_IS_STATUS_OK(nRetVal);
	if (nDataSize != sizeof(request))
	{
		XN_LOG_WARNING_RETURN(XN_STATUS_ERROR, XN_MASK_SENSOR_SERVER, "Sensor server protocol error - invalid size!");
	}

	// get
	XnChar strValue[XN_DEVICE_MAX_STRING_LENGTH];
	XnStatus nActionResult = GetStringPropertyImpl(request.strModuleName, request.strPropertyName, strValue);
	if (nActionResult != XN_STATUS_OK)
	{
		xnLogWarning(XN_MASK_SENSOR_SERVER, "Client %u failed to get property '%s.%s': %s", m_nID, request.strModuleName, request.strPropertyName, xnGetStatusString(nActionResult));
	}

	nRetVal = SendReply(XN_SENSOR_SERVER_MESSAGE_GET_STRING_PROPERTY, nActionResult, sizeof(strValue), strValue);
	XN_IS_STATUS_OK(nRetVal);

	return (XN_STATUS_OK);
}
Example #2
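Generic property setter: rejects read-only properties, logs the change at the property's configured severity, and skips the set callback when the value has not changed.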
XnStatus XnProperty::SetValue(const void* pValue)
{
	if (m_pSetCallback == NULL)
	{
		XN_LOG_WARNING_RETURN(XN_STATUS_DEVICE_PROPERTY_READ_ONLY, XN_MASK_DDK, "Property %s.%s is read only.", GetModule(), GetName());
	}

	if (m_LogSeverity != -1)
	{
		XnChar strValue[XN_DEVICE_MAX_STRING_LENGTH];
		if (ConvertValueToString(strValue, pValue))
		{
			xnLogWrite(XN_MASK_DDK, (XnLogSeverity)m_LogSeverity, __FILE__, __LINE__, "Setting %s.%s to %s...", GetModule(), GetName(), strValue);
		}
		else
		{
			xnLogWrite(XN_MASK_DDK, (XnLogSeverity)m_LogSeverity, __FILE__, __LINE__, "Setting %s.%s...", GetModule(), GetName());
		}
	}

	if (!m_bAlwaysSet && IsActual() && IsEqual(m_pValueHolder, pValue))
	{
		if (m_LogSeverity != -1)
		{
			xnLogWrite(XN_MASK_DDK, (XnLogSeverity)m_LogSeverity, __FILE__, __LINE__, "%s.%s value did not change.", GetModule(), GetName());
		}
	}
	else
	{
		XnStatus nRetVal = CallSetCallback(m_pSetCallback, pValue, m_pSetCallbackCookie);
		if (nRetVal != XN_STATUS_OK)
		{
			if (m_LogSeverity != -1)
			{
				xnLogWrite(XN_MASK_DDK, (XnLogSeverity)m_LogSeverity, __FILE__, __LINE__, "Failed setting %s.%s: %s", GetModule(), GetName(), xnGetStatusString(nRetVal));
			}
			return (nRetVal);
		}
		else
		{
			if (m_LogSeverity != -1)
			{
				xnLogWrite(XN_MASK_DDK, (XnLogSeverity)m_LogSeverity, __FILE__, __LINE__, "%s.%s was successfully set.", GetModule(), GetName());
			}
		}
	}

	return (XN_STATUS_OK);
}
Example #3
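OpenNI + OpenCV paint demo: tracks a skeleton, draws strokes with the right hand when it is pushed forward, and picks stroke colors with the left hand from a palette along the top of the frame.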
int main (int argc, char * argv[])
{
    IplImage* camera = 0;
    
    try {
        // Initialize the context
        xn::Context context;
        XnStatus rc = context.InitFromXmlFile(CONFIG_XML_PATH);
        if (rc != XN_STATUS_OK) {
            throw std::runtime_error(xnGetStatusString(rc));
        }
        
        // Enable mirror mode (flip the image)
        context.SetGlobalMirror(TRUE);
        
        // Find the image generator
        xn::ImageGenerator image;
        rc = context.FindExistingNode(XN_NODE_TYPE_IMAGE, image);
        if (rc != XN_STATUS_OK) {
            throw std::runtime_error(xnGetStatusString(rc));
        }
        
        // Find the depth generator
        xn::DepthGenerator depth;
        rc = context.FindExistingNode(XN_NODE_TYPE_DEPTH, depth);
        if (rc != XN_STATUS_OK) {
            throw std::runtime_error(xnGetStatusString(rc));
        }
        
        // Align the depth map with the image viewpoint
        depth.GetAlternativeViewPointCap().SetViewPoint(image);
        
        // Find or create the user generator
        xn::UserGenerator user;
        rc = context.FindExistingNode( XN_NODE_TYPE_USER, user );
        if ( rc != XN_STATUS_OK ) {
            rc = user.Create(context);
            if ( rc != XN_STATUS_OK ) {
                throw std::runtime_error( xnGetStatusString( rc ) );
            }
        }
        
        // Make sure user/skeleton detection is supported
        if (!user.IsCapabilitySupported(XN_CAPABILITY_SKELETON)) {
            throw std::runtime_error("user detection is not supported");
        }
        
        XnCallbackHandle userCallbacks, calibrationCallbacks, poseCallbacks;
        XnChar pose[20] = "";
        
        // Check whether calibration requires a pose
        xn::SkeletonCapability skelton = user.GetSkeletonCap();
        if (skelton.NeedPoseForCalibration()) {
            // Check that pose detection is supported
            if (!user.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION)) {
                throw std::runtime_error("pose detection is not supported");
            }
            
            // Get the calibration pose
            skelton.GetCalibrationPose(pose);
            
            // Register the pose detection callbacks
            // (named poseCap here so it does not shadow the pose[] buffer above)
            xn::PoseDetectionCapability poseCap = user.GetPoseDetectionCap();
            poseCap.RegisterToPoseCallbacks(&::PoseDetected, &::PoseLost,
                                            &user, poseCallbacks);
        }
        
        // Register the user detection callbacks
        user.RegisterUserCallbacks(&::UserDetected, &::UserLost, pose,
                                   userCallbacks);
        
        // Register the calibration callbacks
        skelton.RegisterCalibrationCallbacks(&::CalibrationStart, &::CalibrationEnd,
                                             &user, calibrationCallbacks);
        
        // Track the full skeleton profile
        skelton.SetSkeletonProfile(XN_SKEL_PROFILE_ALL);
        
        // Start generating data on all nodes
        context.StartGeneratingAll();
        
        // Create an image matching the camera size (8-bit RGB)
        XnMapOutputMode outputMode;
        image.GetMapOutputMode(outputMode);
        camera = ::cvCreateImage(cvSize(outputMode.nXRes, outputMode.nYRes),
                                 IPL_DEPTH_8U, 3);
        if (!camera) {
            throw std::runtime_error("error : cvCreateImage");
        }
        
        // Display toggles
        bool isShowImage = true;
        bool isShowUser = true;
        bool isShowSkelton = true;
        

        // Drawing state
        static enum State{
            IDLE = 0,
            CIRCLE,
        } state[15] = { IDLE };

        typedef std::vector<XnPoint3D> line;
        line points;
        CvScalar color = CV_RGB(0,0,0);
        
        typedef std::vector<CvScalar> LineColors;
        LineColors colors;
        colors.push_back(CV_RGB(255,255,255));
        colors.push_back(CV_RGB(255,255,255));
        colors.push_back(CV_RGB(0,0,0));
        colors.push_back(CV_RGB(255,0,0));
        colors.push_back(CV_RGB(0,255,0));
        colors.push_back(CV_RGB(0,0,255));
        colors.push_back(CV_RGB(255,255,0));
        colors.push_back(CV_RGB(0,255,255));
        colors.push_back(CV_RGB(255,0,255));
        
        // Main loop
        while (1) {
            // Wait for all nodes to update
            context.WaitAndUpdateAll();
            
            // Get the image data
            xn::ImageMetaData imageMD;
            image.GetMetaData(imageMD);
            
            // Get the user (scene) data
            xn::SceneMetaData sceneMD;
            user.GetUserPixels(0, sceneMD);
            
            // Render the camera image
            char* dest = camera->imageData;
            const xn::RGB24Map& rgb = imageMD.RGB24Map();
            for (int y = 0; y < imageMD.YRes(); ++y) {
                for (int x = 0; x < imageMD.XRes(); ++x) {
                    // User overlay
                    XnLabel label = sceneMD(x, y);
                    if (!isShowUser) {
                        label = 0;
                    }
                    
                    // Camera pixel
                    XnRGB24Pixel pixel = rgb(x, y);
                    if (!isShowImage) {
                        pixel = xnRGB24Pixel( 255, 255, 255 );
                    }
                    
                    // Write to the output buffer
                    dest[0] = pixel.nRed   * Colors[label][0];
                    dest[1] = pixel.nGreen * Colors[label][1];
                    dest[2] = pixel.nBlue  * Colors[label][2];
                    dest += 3;
                }
            }
            
            // Draw the skeletons
            if (isShowSkelton) {
                XnUserID aUsers[15];
                XnUInt16 nUsers = 15;
                user.GetUsers(aUsers, nUsers);
//                for (int i = 0; i < nUsers; ++i) {
                for (int i = 0; i < 1; ++i) {
                    if (skelton.IsTracking(aUsers[i])) {
                        SkeltonDrawer skeltonDrawer(camera, skelton,
                                                    depth, aUsers[i]);
                        skeltonDrawer.draw();

                        // Get the shoulder and hand joint positions
                        XnSkeletonJointPosition shoulder, l_shoulder, r_hand, l_hand;
                        skelton.GetSkeletonJointPosition(aUsers[i], XN_SKEL_RIGHT_SHOULDER, shoulder);
                        skelton.GetSkeletonJointPosition(aUsers[i], XN_SKEL_RIGHT_HAND, r_hand);
                        skelton.GetSkeletonJointPosition(aUsers[i], XN_SKEL_LEFT_SHOULDER, l_shoulder);
                        skelton.GetSkeletonJointPosition(aUsers[i], XN_SKEL_LEFT_HAND, l_hand);

                        // Convert real-world coordinates to screen (projective) coordinates
                        XnPoint3D pt_r_hand, pt_l_hand;
                        depth.ConvertRealWorldToProjective(1, &r_hand.position, &pt_r_hand);
                        depth.ConvertRealWorldToProjective(1, &l_hand.position, &pt_l_hand);

                        // Right hand at least 40 cm in front of the right shoulder (arm extended)
                        if ((shoulder.position.Z - r_hand.position.Z) >= 400) {
                            std::cout << " CIRCLE";
                            state[i] = CIRCLE;
                        }
                        // Left hand at least 40 cm in front of the left shoulder (arm extended)
                        if ((l_shoulder.position.Z - l_hand.position.Z) >= 400) {
                            std::cout << " IDLE";
                            state[i] = IDLE;
                        }
                        
                        
                        // Switch colors when the left hand is over the palette at the top-left
                        int r = 50;
                        for (int c = 0; c < colors.size(); ++c) {
                            cvRectangle(camera, cvPoint(c * r, 0), cvPoint((c+1)*r, r), colors[c], CV_FILLED);
                        }
                        if (pt_l_hand.Y < r) {
                            int index = pt_l_hand.X / r;
                            if (index == 0) {
                                points.clear();
                            }
                            else if (index < colors.size()) {
                                std::cout << " change color";
                                color = colors[index];
                            }
                        }
                        
                        std::cout << std::endl;

                        if (state[i] == CIRCLE) {
                            points.push_back(pt_r_hand);
                            cvCircle(camera, cvPoint(pt_r_hand.X, pt_r_hand.Y), 10, CV_RGB(255, 255, 0), 5);
                        }

                        // Draw the stroke
                        for (line::iterator it = points.begin();it != points.end();){
                            CvPoint pt1 = cvPoint(it->X, it->Y);
                            CvPoint pt2 = cvPoint(it->X, it->Y);
                            if (++it != points.end()) {
                                pt2 = cvPoint(it->X, it->Y);
                            }
                            
                            ::cvLine(camera, pt1, pt2, color, 3);
                        }
                    }
                }
            }
            
            ::cvCvtColor(camera, camera, CV_BGR2RGB);
            ::cvShowImage("KinectImage", camera);
            
            // Key events
            char key = cvWaitKey(10);
            // Quit
            if (key == 'q') {
                break;
            }
            // Toggle mirroring
            else if (key == 'm') {
                context.SetGlobalMirror(!context.GetGlobalMirror());
            }
            // Toggle display flags
            else if (key == 'i') {
                isShowImage = !isShowImage;
            }
            else if (key == 'u') {
                isShowUser = !isShowUser;
            }
            else if (key == 's') {
                isShowSkelton = !isShowSkelton;
            }
        }
    }
    catch (std::exception& ex) {
        std::cout << ex.what() << std::endl;
    }
    
    ::cvReleaseImage(&camera);
    
    return 0;
}
Example #4
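ROS person-tracker node: converts NITE skeleton joints into spherical coordinates for two arm controllers and broadcasts every joint as a tf transform.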
int main(int argc, char **argv) {
	ros::init(argc, argv, "PersonTracker");
	ros::NodeHandle nh;
	ros::Rate loop_rate(10);
    ROS_INFO("Initalizing Arm Connection....");
    ros::Publisher leftArmPublisher = nh.advertise<prlite_kinematics::SphereCoordinate>("/armik/n0/position", 1000);
    ros::Publisher rightArmPublisher = nh.advertise<prlite_kinematics::SphereCoordinate>("/armik/n1/position", 1000);
	yourmom = nh.advertise<prlite_kinematics::some_status_thing>("kinect_hack_status", 1000);
    ros::Duration(2.0).sleep();
    prlite_kinematics::SphereCoordinate coord;
	prlite_kinematics::some_status_thing status;
    coord.radius = 35.0;
    coord.phi = 0.0;
    coord.theta = 0.0;
	status.lulz = 0;	//initialized/reset
	yourmom.publish(status);
    leftArmPublisher.publish(coord);
    rightArmPublisher.publish(coord);
	ROS_INFO("Initalizing Kinect + NITE....");
	XnStatus nRetVal = XN_STATUS_OK;
	xn::EnumerationErrors errors;
	nRetVal = g_Context.InitFromXmlFile(SAMPLE_XML_PATH, &errors);
	
	if (nRetVal == XN_STATUS_NO_NODE_PRESENT)
	{
		XnChar strError[1024];
		errors.ToString(strError, 1024);
		printf("%s\n", strError);
		return (nRetVal);
	} else if (nRetVal != XN_STATUS_OK) {
		printf("Open failed: %s\n", xnGetStatusString(nRetVal));
		return (nRetVal);
	}

	nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_DepthGenerator);
	CHECK_RC(nRetVal, "Find depth generator");
	nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_USER, g_UserGenerator);
	if (nRetVal != XN_STATUS_OK)
	{
		nRetVal = g_UserGenerator.Create(g_Context);
		CHECK_RC(nRetVal, "Find user generator");
	}

	XnCallbackHandle hUserCallbacks, hCalibrationCallbacks, hPoseCallbacks;
	if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_SKELETON))
	{
		printf("Supplied user generator doesn't support skeleton\n");
		return 1;
	}
	g_UserGenerator.RegisterUserCallbacks(User_NewUser, User_LostUser, NULL, hUserCallbacks);
	g_UserGenerator.GetSkeletonCap().RegisterCalibrationCallbacks(UserCalibration_CalibrationStart, UserCalibration_CalibrationEnd, NULL, hCalibrationCallbacks);

	if (g_UserGenerator.GetSkeletonCap().NeedPoseForCalibration())
	{
		g_bNeedPose = TRUE;
		if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION))
		{
			printf("Pose required, but not supported\n");
			return 1;
		}
		g_UserGenerator.GetPoseDetectionCap().RegisterToPoseCallbacks(UserPose_PoseDetected, NULL, NULL, hPoseCallbacks);
		g_UserGenerator.GetSkeletonCap().GetCalibrationPose(g_strPose);
	}

	g_UserGenerator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);

	nRetVal = g_Context.StartGeneratingAll();
	CHECK_RC(nRetVal, "StartGenerating");
    ROS_INFO("Ready To Go!\n");

	while (ros::ok()) {
		// Update OpenNI
		g_Context.WaitAndUpdateAll();
		xn::SceneMetaData sceneMD;
		xn::DepthMetaData depthMD;
		g_DepthGenerator.GetMetaData(depthMD);
		g_UserGenerator.GetUserPixels(0, sceneMD);
		// Print positions
		XnUserID aUsers[15];
		XnUInt16 nUsers = 15;
		g_UserGenerator.GetUsers(aUsers, nUsers);
		for (int i = 0; i < nUsers; ++i) {
			if(g_UserGenerator.GetSkeletonCap().IsTracking(aUsers[i])) {
				// Read joint positions (NITE provides no wrist joints, so hand joints stand in for wrists)
				XnSkeletonJointPosition torsoPosition, lShoulderPosition, rShoulderPosition, neckPosition, headPosition, lElbowPosition, rElbowPosition;
                XnSkeletonJointPosition rWristPosition, lWristPosition, rHipPosition, lHipPosition, lKneePosition, rKneePosition;
                XnSkeletonJointPosition lFootPosition, rFootPosition;
				g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(aUsers[i], XN_SKEL_TORSO, torsoPosition);
                g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(aUsers[i], XN_SKEL_NECK, neckPosition);
                g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(aUsers[i], XN_SKEL_HEAD, headPosition);
				g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(aUsers[i], XN_SKEL_LEFT_SHOULDER, lShoulderPosition);
				g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(aUsers[i], XN_SKEL_RIGHT_SHOULDER, rShoulderPosition);
				g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(aUsers[i], XN_SKEL_LEFT_ELBOW, lElbowPosition);
				g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(aUsers[i], XN_SKEL_RIGHT_ELBOW, rElbowPosition);
				g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(aUsers[i], XN_SKEL_LEFT_HAND, lWristPosition);
				g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(aUsers[i], XN_SKEL_RIGHT_HAND, rWristPosition);
                g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(aUsers[i], XN_SKEL_LEFT_HIP, lHipPosition);
                g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(aUsers[i], XN_SKEL_RIGHT_HIP, rHipPosition);
                g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(aUsers[i], XN_SKEL_LEFT_KNEE, lKneePosition);
                g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(aUsers[i], XN_SKEL_RIGHT_KNEE, rKneePosition);
                g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(aUsers[i], XN_SKEL_LEFT_FOOT, lFootPosition);
                g_UserGenerator.GetSkeletonCap().GetSkeletonJointPosition(aUsers[i], XN_SKEL_RIGHT_FOOT, rFootPosition);

                // Read Joint Orientations
                XnSkeletonJointOrientation torsoOrientation, lShoulderOrientation, rShoulderOrientation, lHipOrientation, rHipOrientation;
                XnSkeletonJointOrientation lWristOrientation, rWristOrientation, lElbowOrientation, rElbowOrientation;
                XnSkeletonJointOrientation headOrientation, neckOrientation;
                XnSkeletonJointOrientation lKneeOrientation, rKneeOrientation, lFootOrientation, rFootOrientation;
	            g_UserGenerator.GetSkeletonCap().GetSkeletonJointOrientation(aUsers[i], XN_SKEL_TORSO, torsoOrientation);
	            g_UserGenerator.GetSkeletonCap().GetSkeletonJointOrientation(aUsers[i], XN_SKEL_HEAD, headOrientation);
	            g_UserGenerator.GetSkeletonCap().GetSkeletonJointOrientation(aUsers[i], XN_SKEL_NECK, neckOrientation);
	            g_UserGenerator.GetSkeletonCap().GetSkeletonJointOrientation(aUsers[i], XN_SKEL_LEFT_SHOULDER, lShoulderOrientation);
	            g_UserGenerator.GetSkeletonCap().GetSkeletonJointOrientation(aUsers[i], XN_SKEL_RIGHT_SHOULDER, rShoulderOrientation);
	            g_UserGenerator.GetSkeletonCap().GetSkeletonJointOrientation(aUsers[i], XN_SKEL_LEFT_HIP, lHipOrientation);
	            g_UserGenerator.GetSkeletonCap().GetSkeletonJointOrientation(aUsers[i], XN_SKEL_RIGHT_HIP, rHipOrientation);
	            g_UserGenerator.GetSkeletonCap().GetSkeletonJointOrientation(aUsers[i], XN_SKEL_LEFT_ELBOW, lElbowOrientation);
	            g_UserGenerator.GetSkeletonCap().GetSkeletonJointOrientation(aUsers[i], XN_SKEL_RIGHT_ELBOW, rElbowOrientation);
	            g_UserGenerator.GetSkeletonCap().GetSkeletonJointOrientation(aUsers[i], XN_SKEL_LEFT_HAND, lWristOrientation);
	            g_UserGenerator.GetSkeletonCap().GetSkeletonJointOrientation(aUsers[i], XN_SKEL_RIGHT_HAND, rWristOrientation);
	            g_UserGenerator.GetSkeletonCap().GetSkeletonJointOrientation(aUsers[i], XN_SKEL_LEFT_KNEE, lKneeOrientation);
	            g_UserGenerator.GetSkeletonCap().GetSkeletonJointOrientation(aUsers[i], XN_SKEL_RIGHT_KNEE, rKneeOrientation);
	            g_UserGenerator.GetSkeletonCap().GetSkeletonJointOrientation(aUsers[i], XN_SKEL_LEFT_FOOT, lFootOrientation);
	            g_UserGenerator.GetSkeletonCap().GetSkeletonJointOrientation(aUsers[i], XN_SKEL_RIGHT_FOOT, rFootOrientation);

	            XnFloat* m = torsoOrientation.orientation.elements;
	            KDL::Rotation torsoO(m[0], m[1], m[2], m[3], m[4], m[5], m[6], m[7], m[8]);
                m = lShoulderOrientation.orientation.elements;
                KDL::Rotation lShouldO(m[0], m[1], m[2], m[3], m[4], m[5], m[6], m[7], m[8]);
                m = rShoulderOrientation.orientation.elements;
                KDL::Rotation rShouldO(m[0], m[1], m[2], m[3], m[4], m[5], m[6], m[7], m[8]);
                m = lHipOrientation.orientation.elements;
                KDL::Rotation lHipO(m[0], m[1], m[2], m[3], m[4], m[5], m[6], m[7], m[8]);
                m = rHipOrientation.orientation.elements;
                KDL::Rotation rHipO(m[0], m[1], m[2], m[3], m[4], m[5], m[6], m[7], m[8]);
                m = headOrientation.orientation.elements;
                KDL::Rotation headO(m[0], m[1], m[2], m[3], m[4], m[5], m[6], m[7], m[8]);
                m = neckOrientation.orientation.elements;
                KDL::Rotation neckO(m[0], m[1], m[2], m[3], m[4], m[5], m[6], m[7], m[8]);
                m = lElbowOrientation.orientation.elements;
                KDL::Rotation lElbowO(m[0], m[1], m[2], m[3], m[4], m[5], m[6], m[7], m[8]);
                m = rElbowOrientation.orientation.elements;
                KDL::Rotation rElbowO(m[0], m[1], m[2], m[3], m[4], m[5], m[6], m[7], m[8]);
                m = lWristOrientation.orientation.elements;
                KDL::Rotation lWristO(m[0], m[1], m[2], m[3], m[4], m[5], m[6], m[7], m[8]);
                m = rWristOrientation.orientation.elements;
                KDL::Rotation rWristO(m[0], m[1], m[2], m[3], m[4], m[5], m[6], m[7], m[8]);
                m = lKneeOrientation.orientation.elements;
                KDL::Rotation lKneeO(m[0], m[1], m[2], m[3], m[4], m[5], m[6], m[7], m[8]);
                m = rKneeOrientation.orientation.elements;
                KDL::Rotation rKneeO(m[0], m[1], m[2], m[3], m[4], m[5], m[6], m[7], m[8]);
                m = lFootOrientation.orientation.elements;
                KDL::Rotation lFootO(m[0], m[1], m[2], m[3], m[4], m[5], m[6], m[7], m[8]);
                m = rFootOrientation.orientation.elements;
                KDL::Rotation rFootO(m[0], m[1], m[2], m[3], m[4], m[5], m[6], m[7], m[8]);

	            double qx = 0.0, qy = 0.0, qz = 0.0, qw = 0.0;

				// Get Points in 3D space
				XnPoint3D torso = torsoPosition.position;
				XnPoint3D lShould = lShoulderPosition.position;
				XnPoint3D rShould = rShoulderPosition.position;
                XnPoint3D lElbow = lElbowPosition.position;
                XnPoint3D rElbow = rElbowPosition.position;
                XnPoint3D lWrist = lWristPosition.position;
                XnPoint3D rWrist = rWristPosition.position;
                XnPoint3D neck = neckPosition.position;
                XnPoint3D head = headPosition.position;
                XnPoint3D lHip = lHipPosition.position;
                XnPoint3D rHip = rHipPosition.position;
                XnPoint3D lKnee = lKneePosition.position;
                XnPoint3D rKnee = rKneePosition.position;
                XnPoint3D lFoot = lFootPosition.position;
                XnPoint3D rFoot = rFootPosition.position;

                // ---------- ARM CONTROLLER HACK ------------------

                // Calculate arm length of user
                double lenL = calc3DDist( lShould, lElbow ) + calc3DDist( lElbow, lWrist );
                double lenR = calc3DDist( rShould, rElbow ) + calc3DDist( rElbow, rWrist );
                double distL = calc3DDist(lShould, lWrist);
                double distR = calc3DDist(rShould, rWrist);

                // Calculate positions
                prlite_kinematics::SphereCoordinate lCoord;
                prlite_kinematics::SphereCoordinate rCoord;

                lCoord.radius = 35 * (distL / lenL);
                rCoord.radius = 35 * (distR / lenR);

                lCoord.theta = -atan2(lShould.X - lWrist.X, lShould.Z - lWrist.Z);
                rCoord.theta = -atan2(rShould.X - rWrist.X, rShould.Z - rWrist.Z);
                if(lCoord.theta > 1.0) lCoord.theta = 1.0;
                if(lCoord.theta < -1.0) lCoord.theta = -1.0;
                if(rCoord.theta > 1.0) rCoord.theta = 1.0;
                if(rCoord.theta < -1.0) rCoord.theta = -1.0;

                lCoord.phi = -atan2(lShould.Y - lWrist.Y, lShould.X - lWrist.X);
                rCoord.phi = -atan2(rShould.Y - rWrist.Y, rShould.X - rWrist.X);
                if(lCoord.phi > 1.25) lCoord.phi = 1.25;
                if(lCoord.phi < -0.33) lCoord.phi = -0.33;
                if(rCoord.phi > 1.25) rCoord.phi = 1.25;
                if(rCoord.phi < -0.33) rCoord.phi = -0.33;

                ROS_INFO("User %d: Left (%lf,%lf,%lf), Right (%lf,%lf,%lf)", i, lCoord.radius, lCoord.theta, lCoord.phi, rCoord.radius, rCoord.theta, rCoord.phi);

                // Publish to arms
                leftArmPublisher.publish(rCoord);
                rightArmPublisher.publish(lCoord);

                // ---------- END HACK -----------------

                // Publish Transform
                static tf::TransformBroadcaster br;
                tf::Transform transform;
                std::stringstream body_name, neck_name, lshould_name, rshould_name, head_name, relbow_name, lelbow_name, lwrist_name, rwrist_name;
                std::stringstream lhip_name, rhip_name, lknee_name, rknee_name, lfoot_name, rfoot_name;
                body_name << "Torso (" << i << ")" << std::ends;
                neck_name << "Neck (" << i << ")" << std::ends;
                head_name << "Head (" << i << ")" << std::ends;
                lshould_name << "Shoulder L (" << i << ")" << std::ends;
                rshould_name << "Shoulder R (" << i << ")" << std::ends;
                lelbow_name << "Elbow L (" << i << ")" << std::ends;
                relbow_name << "Elbow R (" << i << ")" << std::ends;
                lwrist_name << "Wrist L (" << i << ")" << std::ends;
                rwrist_name << "Wrist R (" << i << ")" << std::ends;
                lhip_name << "Hip L (" << i << ")" << std::ends;
                rhip_name << "Hip R (" << i << ")" << std::ends;
                lknee_name << "Knee L (" << i << ")" << std::ends;
                rknee_name << "Knee R (" << i << ")" << std::ends;
                lfoot_name << "Foot L (" << i << ")" << std::ends;
                rfoot_name << "Foot R (" << i << ")" << std::ends;

                // Publish Torso
                torsoO.GetQuaternion(qx, qy, qz, qw);
                transform.setOrigin(tf::Vector3(torso.X / ADJUST_VALUE, torso.Y / ADJUST_VALUE, torso.Z / ADJUST_VALUE));
                transform.setRotation(tf::Quaternion(qx, qy, qz, qw));
                br.sendTransform(tf::StampedTransform(transform, ros::Time::now(), "world", body_name.str().c_str()));  
                // Publish Left Shoulder
                lShouldO.GetQuaternion(qx, qy, qz, qw);
                transform.setOrigin(tf::Vector3(lShould.X / ADJUST_VALUE, lShould.Y / ADJUST_VALUE, lShould.Z / ADJUST_VALUE));
                transform.setRotation(tf::Quaternion(qx, qy, qz, qw));
                br.sendTransform(tf::StampedTransform(transform, ros::Time::now(), "world", lshould_name.str().c_str()));
                // Publish Right Shoulder
                rShouldO.GetQuaternion(qx, qy, qz, qw);
                transform.setOrigin(tf::Vector3(rShould.X / ADJUST_VALUE, rShould.Y / ADJUST_VALUE, rShould.Z / ADJUST_VALUE));
                transform.setRotation(tf::Quaternion(qx, qy, qz, qw));
                br.sendTransform(tf::StampedTransform(transform, ros::Time::now(), "world", rshould_name.str().c_str())); 
                // Publish Left Elbow
                lElbowO.GetQuaternion(qx, qy, qz, qw);
                transform.setOrigin(tf::Vector3(lElbow.X / ADJUST_VALUE, lElbow.Y / ADJUST_VALUE, lElbow.Z / ADJUST_VALUE));
                transform.setRotation(tf::Quaternion(qx, qy, qz, qw));
                br.sendTransform(tf::StampedTransform(transform, ros::Time::now(), "world", lelbow_name.str().c_str()));
                // Publish Right Elbow
                rElbowO.GetQuaternion(qx, qy, qz, qw);
                transform.setOrigin(tf::Vector3(rElbow.X / ADJUST_VALUE, rElbow.Y / ADJUST_VALUE, rElbow.Z / ADJUST_VALUE));
                transform.setRotation(tf::Quaternion(qx, qy, qz, qw));
                br.sendTransform(tf::StampedTransform(transform, ros::Time::now(), "world", relbow_name.str().c_str()));
                // Publish Left Wrist
                lWristO.GetQuaternion(qx, qy, qz, qw);
                transform.setOrigin(tf::Vector3(lWrist.X / ADJUST_VALUE, lWrist.Y / ADJUST_VALUE, lWrist.Z / ADJUST_VALUE));
                transform.setRotation(tf::Quaternion(qx, qy, qz, qw));
                br.sendTransform(tf::StampedTransform(transform, ros::Time::now(), "world", lwrist_name.str().c_str()));
                // Publish Right Wrist
                rWristO.GetQuaternion(qx, qy, qz, qw);
                transform.setOrigin(tf::Vector3(rWrist.X / ADJUST_VALUE, rWrist.Y / ADJUST_VALUE, rWrist.Z / ADJUST_VALUE));
                transform.setRotation(tf::Quaternion(qx, qy, qz, qw));
                br.sendTransform(tf::StampedTransform(transform, ros::Time::now(), "world", rwrist_name.str().c_str()));
                // Publish Neck
                neckO.GetQuaternion(qx, qy, qz, qw);
                transform.setOrigin(tf::Vector3(neck.X / ADJUST_VALUE, neck.Y / ADJUST_VALUE, neck.Z / ADJUST_VALUE));
                transform.setRotation(tf::Quaternion(qx, qy, qz, qw));
                br.sendTransform(tf::StampedTransform(transform, ros::Time::now(), "world", neck_name.str().c_str())); 
                // Publish Head
                headO.GetQuaternion(qx, qy, qz, qw);
                transform.setOrigin(tf::Vector3(head.X / ADJUST_VALUE, head.Y / ADJUST_VALUE, head.Z / ADJUST_VALUE));
                transform.setRotation(tf::Quaternion(qx, qy, qz, qw));
                br.sendTransform(tf::StampedTransform(transform, ros::Time::now(), "world", head_name.str().c_str()));
                // Publish Left Hip
                lHipO.GetQuaternion(qx, qy, qz, qw);
                transform.setOrigin(tf::Vector3(lHip.X / ADJUST_VALUE, lHip.Y / ADJUST_VALUE, lHip.Z / ADJUST_VALUE));
                transform.setRotation(tf::Quaternion(qx, qy, qz, qw));
                br.sendTransform(tf::StampedTransform(transform, ros::Time::now(), "world", lhip_name.str().c_str()));
                // Publish Right Hip
                rHipO.GetQuaternion(qx, qy, qz, qw);
                transform.setOrigin(tf::Vector3(rHip.X / ADJUST_VALUE, rHip.Y / ADJUST_VALUE, rHip.Z / ADJUST_VALUE));
                transform.setRotation(tf::Quaternion(qx, qy, qz, qw));
                br.sendTransform(tf::StampedTransform(transform, ros::Time::now(), "world", rhip_name.str().c_str())); 
                // Publish Left Knee
                lKneeO.GetQuaternion(qx, qy, qz, qw);
                transform.setOrigin(tf::Vector3(lKnee.X / ADJUST_VALUE, lKnee.Y / ADJUST_VALUE, lKnee.Z / ADJUST_VALUE));
                transform.setRotation(tf::Quaternion(qx, qy, qz, qw));
                br.sendTransform(tf::StampedTransform(transform, ros::Time::now(), "world", lknee_name.str().c_str()));
                // Publish Right Knee
                rKneeO.GetQuaternion(qx, qy, qz, qw);
                transform.setOrigin(tf::Vector3(rKnee.X / ADJUST_VALUE, rKnee.Y / ADJUST_VALUE, rKnee.Z / ADJUST_VALUE));
                transform.setRotation(tf::Quaternion(qx, qy, qz, qw));
                br.sendTransform(tf::StampedTransform(transform, ros::Time::now(), "world", rknee_name.str().c_str())); 
                // Publish Left Foot
                lFootO.GetQuaternion(qx, qy, qz, qw);
                transform.setOrigin(tf::Vector3(lFoot.X / ADJUST_VALUE, lFoot.Y / ADJUST_VALUE, lFoot.Z / ADJUST_VALUE));
                transform.setRotation(tf::Quaternion(qx, qy, qz, qw));
                br.sendTransform(tf::StampedTransform(transform, ros::Time::now(), "world", lfoot_name.str().c_str()));
                // Publish Right Foot
                rFootO.GetQuaternion(qx, qy, qz, qw);
                transform.setOrigin(tf::Vector3(rFoot.X / ADJUST_VALUE, rFoot.Y / ADJUST_VALUE, rFoot.Z / ADJUST_VALUE));
                transform.setRotation(tf::Quaternion(qx, qy, qz, qw));
                br.sendTransform(tf::StampedTransform(transform, ros::Time::now(), "world", rfoot_name.str().c_str()));
			}
		}
		ros::spinOnce();
		//loop_rate.sleep();
	}
	g_Context.Shutdown();
	return 0;
}
Example #5
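Sample entry point: opens a recording or an XML-configured context, falls back to a mock depth generator when none exists, registers user/calibration/pose callbacks, and enters the GL main loop.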
int main(int argc, char **argv)
{
	XnStatus nRetVal = XN_STATUS_OK;

	if (argc > 1)
	{
		nRetVal = g_Context.Init();
		CHECK_RC(nRetVal, "Init");
		nRetVal = g_Context.OpenFileRecording(argv[1], g_Player);
		if (nRetVal != XN_STATUS_OK)
		{
			printf("Can't open recording %s: %s\n", argv[1], xnGetStatusString(nRetVal));
			return 1;
		}
	}
	else
	{
		xn::EnumerationErrors errors;
		nRetVal = g_Context.InitFromXmlFile(SAMPLE_XML_PATH, g_scriptNode, &errors);
		if (nRetVal == XN_STATUS_NO_NODE_PRESENT)
		{
			XnChar strError[1024];
			errors.ToString(strError, 1024);
			printf("%s\n", strError);
			return (nRetVal);
		}
		else if (nRetVal != XN_STATUS_OK)
		{
			printf("Open failed: %s\n", xnGetStatusString(nRetVal));
			return (nRetVal);
		}
	}

	nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_DepthGenerator);
	if (nRetVal != XN_STATUS_OK)
	{
		printf("No depth generator found. Using a default one...");
		xn::MockDepthGenerator mockDepth;
		nRetVal = mockDepth.Create(g_Context);
		CHECK_RC(nRetVal, "Create mock depth");

		// set some defaults
		XnMapOutputMode defaultMode;
		defaultMode.nXRes = 320;
		defaultMode.nYRes = 240;
		defaultMode.nFPS = 30;
		nRetVal = mockDepth.SetMapOutputMode(defaultMode);
		CHECK_RC(nRetVal, "set default mode");

		// set FOV
		XnFieldOfView fov;
		fov.fHFOV = 1.0225999419141749;
		fov.fVFOV = 0.79661567681716894;
		nRetVal = mockDepth.SetGeneralProperty(XN_PROP_FIELD_OF_VIEW, sizeof(fov), &fov);
		CHECK_RC(nRetVal, "set FOV");

		XnUInt32 nDataSize = defaultMode.nXRes * defaultMode.nYRes * sizeof(XnDepthPixel);
		XnDepthPixel* pData = (XnDepthPixel*)xnOSCallocAligned(nDataSize, 1, XN_DEFAULT_MEM_ALIGN);

		nRetVal = mockDepth.SetData(1, 0, nDataSize, pData);
		CHECK_RC(nRetVal, "set empty depth map");

		g_DepthGenerator = mockDepth;
	}

	nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_USER, g_UserGenerator);
	if (nRetVal != XN_STATUS_OK)
	{
		nRetVal = g_UserGenerator.Create(g_Context);
		CHECK_RC(nRetVal, "Find user generator");
	}

	XnCallbackHandle hUserCallbacks, hCalibrationStart, hCalibrationComplete, hPoseDetected, hCalibrationInProgress, hPoseInProgress;
	if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_SKELETON))
	{
		printf("Supplied user generator doesn't support skeleton\n");
		return 1;
	}
	nRetVal = g_UserGenerator.RegisterUserCallbacks(User_NewUser, User_LostUser, NULL, hUserCallbacks);
	CHECK_RC(nRetVal, "Register to user callbacks");
	nRetVal = g_UserGenerator.GetSkeletonCap().RegisterToCalibrationStart(UserCalibration_CalibrationStart, NULL, hCalibrationStart);
	CHECK_RC(nRetVal, "Register to calibration start");
	nRetVal = g_UserGenerator.GetSkeletonCap().RegisterToCalibrationComplete(UserCalibration_CalibrationComplete, NULL, hCalibrationComplete);
	CHECK_RC(nRetVal, "Register to calibration complete");

	if (g_UserGenerator.GetSkeletonCap().NeedPoseForCalibration())
	{
		g_bNeedPose = TRUE;
		if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION))
		{
			printf("Pose required, but not supported\n");
			return 1;
		}
		nRetVal = g_UserGenerator.GetPoseDetectionCap().RegisterToPoseDetected(UserPose_PoseDetected, NULL, hPoseDetected);
		CHECK_RC(nRetVal, "Register to Pose Detected");
		g_UserGenerator.GetSkeletonCap().GetCalibrationPose(g_strPose);

		nRetVal = g_UserGenerator.GetPoseDetectionCap().RegisterToPoseInProgress(MyPoseInProgress, NULL, hPoseInProgress);
		CHECK_RC(nRetVal, "Register to pose in progress");
	}

	g_UserGenerator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);

	nRetVal = g_UserGenerator.GetSkeletonCap().RegisterToCalibrationInProgress(MyCalibrationInProgress, NULL, hCalibrationInProgress);
	CHECK_RC(nRetVal, "Register to calibration in progress");

	nRetVal = g_Context.StartGeneratingAll();
	CHECK_RC(nRetVal, "StartGenerating");

#ifndef USE_GLES
	glInit(&argc, argv);
	glutMainLoop();
#else
	if (!opengles_init(GL_WIN_SIZE_X, GL_WIN_SIZE_Y, &display, &surface, &context))
	{
		printf("Error initializing opengles\n");
		CleanupExit();
	}

	glDisable(GL_DEPTH_TEST);
	glEnable(GL_TEXTURE_2D);
	glEnableClientState(GL_VERTEX_ARRAY);
	glDisableClientState(GL_COLOR_ARRAY);

	while (!g_bQuit)
	{
		glutDisplay();
		eglSwapBuffers(display, surface);
	}
	opengles_shutdown(display, surface, context);

	CleanupExit();
#endif
}
Example #6
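Builds a shared-library search pattern for device modules, lists the matching files, and tries to load each one into the device manager.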
XnStatus XnDeviceManagerLoadAllDevices(const XnChar* strDir)
{
	XnStatus nRetVal = XN_STATUS_OK;

	XnChar cpSearchString[XN_FILE_MAX_PATH] = "";

	if (strDir == NULL)
	{
		strDir = XN_FILE_LOCAL_DIR;
	}

	// Build the search pattern string
	XN_VALIDATE_STR_APPEND(cpSearchString, strDir, XN_FILE_MAX_PATH, nRetVal);
	XN_VALIDATE_STR_APPEND(cpSearchString, XN_FILE_DIR_SEP, XN_FILE_MAX_PATH, nRetVal);
	XN_VALIDATE_STR_APPEND(cpSearchString, XN_SHARED_LIBRARY_PREFIX, XN_FILE_MAX_PATH, nRetVal);
	XN_VALIDATE_STR_APPEND(cpSearchString, XN_DEVICE_FILE_PREFIX, XN_FILE_MAX_PATH, nRetVal);
	XN_VALIDATE_STR_APPEND(cpSearchString, XN_FILE_ALL_WILDCARD, XN_FILE_MAX_PATH, nRetVal);
	XN_VALIDATE_STR_APPEND(cpSearchString, XN_SHARED_LIBRARY_POSTFIX, XN_FILE_MAX_PATH, nRetVal);

	// Get a file list of Xiron devices
	XnChar acsFileList[XN_DEVICE_MANAGER_MAX_NUMBER_OF_DEVICES][XN_FILE_MAX_PATH];
	XnUInt32 nFileCount = 0;

	xnLogVerbose(XN_MASK_DEVICE_MANAGER, "Searching for %s...", cpSearchString);
	nRetVal = xnOSGetFileList(cpSearchString, NULL, acsFileList, XN_DEVICE_MANAGER_MAX_NUMBER_OF_DEVICES, &nFileCount);
	if ((nRetVal != XN_STATUS_OS_FILE_NOT_FOUND) && (nRetVal != XN_STATUS_OK))
	{
		return (nRetVal);
	}

	// now try to load each file
	for (XnUInt32 nIndex = 0; nIndex < nFileCount; ++nIndex)
	{
		xnLogVerbose(XN_MASK_DEVICE_MANAGER, "Trying to load a device '%s'...", acsFileList[nIndex]);
		nRetVal = XnDeviceManagerLoadDevice(acsFileList[nIndex], &g_pDeviceManager->aDevices[g_pDeviceManager->nDevicesCount]);
		if (nRetVal != XN_STATUS_OK)
		{
			xnLogWarning(XN_MASK_DEVICE_MANAGER, "'%s' is not a valid device: %s", acsFileList[nIndex], xnGetStatusString(nRetVal));
		}
		else
		{
			xnLogInfo(XN_MASK_DEVICE_MANAGER, "device '%s' loaded.", acsFileList[nIndex]);
			g_pDeviceManager->nDevicesCount++;
		}
	}

	return (XN_STATUS_OK);
}
Example #7
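OpenNI context initialization: locates OpenNI.ini next to the shared library, applies its log and driver-repository settings, and loads the driver libraries.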
OniStatus Context::initialize()
{
	XnBool repositoryOverridden = FALSE;
	XnChar repositoryFromINI[XN_FILE_MAX_PATH] = {0};

	m_initializationCounter++;
	if (m_initializationCounter > 1)
	{
		xnLogVerbose(XN_MASK_ONI_CONTEXT, "Initialize: Already initialized");
		return ONI_STATUS_OK;
	}

	XnStatus rc;

	XnChar strModulePath[XN_FILE_MAX_PATH];
	rc = xnOSGetModulePathForProcAddress(reinterpret_cast<void*>(&dummyFunctionToTakeAddress), strModulePath);
	if (rc != XN_STATUS_OK)
	{
		m_errorLogger.Append("Couldn't get the OpenNI shared library module's path: %s", xnGetStatusString(rc));
		return OniStatusFromXnStatus(rc);
	}

	XnChar strBaseDir[XN_FILE_MAX_PATH];
	rc = xnOSGetDirName(strModulePath, strBaseDir, XN_FILE_MAX_PATH);
	if (rc != XN_STATUS_OK)
	{
		// Very unlikely to happen, but just in case.
		m_errorLogger.Append("Couldn't get the OpenNI shared library module's directory: %s", xnGetStatusString(rc));
		return OniStatusFromXnStatus(rc);
	}

	s_valid = TRUE;

	// Read configuration file

	XnChar strOniConfigurationFile[XN_FILE_MAX_PATH];
	XnBool configurationFileExists = FALSE;

	// Search the module directory for OpenNI.ini.
	xnOSStrCopy(strOniConfigurationFile, strBaseDir, XN_FILE_MAX_PATH);
	rc = xnOSAppendFilePath(strOniConfigurationFile, ONI_CONFIGURATION_FILE, XN_FILE_MAX_PATH);
	if (rc == XN_STATUS_OK)
	{
		xnOSDoesFileExist(strOniConfigurationFile, &configurationFileExists);
	}

#ifdef ONI_PLATFORM_ANDROID_OS
	xnLogSetMaskMinSeverity(XN_LOG_MASK_ALL, (XnLogSeverity)0);
	xnLogSetAndroidOutput(TRUE);
#endif
	
	if (configurationFileExists)
	{
		// First, we should process the log related configuration as early as possible.

		XnInt32 nValue;
		XnChar strLogPath[XN_FILE_MAX_PATH] = {0};

		// Check whether log redirection is needed
		rc = xnOSReadStringFromINI(strOniConfigurationFile, "Log", "LogPath", strLogPath, XN_FILE_MAX_PATH);
		if (rc == XN_STATUS_OK)
		{
			rc = xnLogSetOutputFolder(strLogPath);
			if (rc != XN_STATUS_OK)
			{
				xnLogWarning(XN_MASK_ONI_CONTEXT, "Failed to set log output folder: %s", xnGetStatusString(rc));
			}
			else
			{
				xnLogVerbose(XN_MASK_ONI_CONTEXT, "Log directory redirected to: %s", strLogPath);
			}
		}

		rc = xnOSReadIntFromINI(strOniConfigurationFile, "Log", "Verbosity", &nValue);
		if (rc == XN_STATUS_OK)
		{
			xnLogSetMaskMinSeverity(XN_LOG_MASK_ALL, (XnLogSeverity)nValue);
		}

		rc = xnOSReadIntFromINI(strOniConfigurationFile, "Log", "LogToConsole", &nValue);
		if (rc == XN_STATUS_OK)
		{
			xnLogSetConsoleOutput(nValue == 1);
		}
		
		rc = xnOSReadIntFromINI(strOniConfigurationFile, "Log", "LogToFile", &nValue);
		if (rc == XN_STATUS_OK)
		{
			xnLogSetFileOutput(nValue == 1);
		}

		// Then, process the other device configurations.

		rc = xnOSReadStringFromINI(strOniConfigurationFile, "Device", "Override", m_overrideDevice, XN_FILE_MAX_PATH);
		if (rc != XN_STATUS_OK)
		{
			xnLogVerbose(XN_MASK_ONI_CONTEXT, "No override device in configuration file");
		}

		rc = xnOSReadStringFromINI(strOniConfigurationFile, "Drivers", "Repository", repositoryFromINI, XN_FILE_MAX_PATH);
		if (rc == XN_STATUS_OK)
		{
			repositoryOverridden = TRUE;
		}

		xnLogVerbose(XN_MASK_ONI_CONTEXT, "Configuration has been read from '%s'", strOniConfigurationFile);
	}
	else
	{
		xnLogVerbose(XN_MASK_ONI_CONTEXT, "Couldn't find configuration file '%s'", strOniConfigurationFile);
	}

	xnLogVerbose(XN_MASK_ONI_CONTEXT, "OpenNI %s", ONI_VERSION_STRING);

	// Resolve the driver path based on the module's directory.
	XnChar strDriverPath[XN_FILE_MAX_PATH];
	xnOSStrCopy(strDriverPath, strBaseDir, XN_FILE_MAX_PATH);

	if (repositoryOverridden)
	{
		xnLogVerbose(XN_MASK_ONI_CONTEXT, "Extending the driver path by '%s', as configured in file '%s'", repositoryFromINI, strOniConfigurationFile);
		rc = xnOSAppendFilePath(strDriverPath, repositoryFromINI, XN_FILE_MAX_PATH);
	}
	else
	{
		rc = xnOSAppendFilePath(strDriverPath, ONI_DEFAULT_DRIVERS_REPOSITORY, XN_FILE_MAX_PATH);
	}

	if (rc != XN_STATUS_OK)
	{
		m_errorLogger.Append("The driver path gets too long");
		return OniStatusFromXnStatus(rc);
	}

	xnLogVerbose(XN_MASK_ONI_CONTEXT, "Using '%s' as driver path", strDriverPath);
	rc = loadLibraries(strDriverPath);

	if (rc == XN_STATUS_OK)
	{
		m_errorLogger.Clear();
	}

	return OniStatusFromXnStatus(rc);
}
Example #8
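Sensor-server handler: reads a stream name, performs the read, and replies with either the data or a general error response.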
XnStatus XnServerSession::HandleReadStream()
{
	XnStatus nRetVal = XN_STATUS_OK;

	// read it
	XnChar strStreamName[XN_DEVICE_MAX_STRING_LENGTH];
	XnUInt32 nDataSize = XN_DEVICE_MAX_STRING_LENGTH;
	nRetVal = m_privateIncomingPacker.ReadCustomData(XN_SENSOR_SERVER_MESSAGE_READ_STREAM, strStreamName, &nDataSize);
	XN_IS_STATUS_OK(nRetVal);

	XnSensorServerReadReply reply;
	XnStatus nActionResult = ReadStreamImpl(strStreamName, &reply);
	if (nActionResult == XN_STATUS_OK)
	{
		m_pLogger->DumpMessage("Data", sizeof(reply), 0, strStreamName);
		nRetVal = SendReply(XN_SENSOR_SERVER_MESSAGE_READ_STREAM, XN_STATUS_OK, sizeof(reply), &reply);
		XN_IS_STATUS_OK(nRetVal);
	}
	else
	{
		xnLogWarning(XN_MASK_SENSOR_SERVER, "Client %u failed to read stream '%s': %s", m_nID, strStreamName, xnGetStatusString(nActionResult));

		nRetVal = SendReply(XN_SENSOR_SERVER_MESSAGE_GENERAL_OP_RESPOND, nActionResult);
		XN_IS_STATUS_OK(nRetVal);
	}

	return (XN_STATUS_OK);
}
Example #9
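Sensor-server handler for a client BYE: closes the session and always replies OK, since the client cannot act on a close failure.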
XnStatus XnServerSession::HandleCloseSession()
{
	XnStatus nRetVal = XN_STATUS_OK;
	
	xnLogVerbose(XN_MASK_SENSOR_SERVER, "Received BYE from client %u", m_nID);

	XnStatus nActionResult = CloseSessionImpl();
	XN_ASSERT(nActionResult == XN_STATUS_OK);
	XN_REFERENCE_VARIABLE(nActionResult);

	// client shouldn't care if close succeeded or not. always send OK.
	nRetVal = SendReply(XN_SENSOR_SERVER_MESSAGE_BYE, XN_STATUS_OK);
	if (nRetVal != XN_STATUS_OK)
	{
		xnLogWarning(XN_MASK_SENSOR_SERVER, "Failed to send BYE reply to client %u: %s", m_nID, xnGetStatusString(nRetVal));
	}

	return (XN_STATUS_OK);
}
Example #10
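Sensor-server handler: reads a stream name and removes that stream, replying with the operation's result.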
XnStatus XnServerSession::HandleRemoveStream()
{
	XnStatus nRetVal = XN_STATUS_OK;

	// read it
	XnChar strName[XN_DEVICE_MAX_STRING_LENGTH];
	nRetVal = m_privateIncomingPacker.ReadStreamRemoved(strName);
	XN_IS_STATUS_OK(nRetVal);

	xnLogVerbose(XN_MASK_SENSOR_SERVER, "Client %u requested to remove stream %s", m_nID, strName);
	XnStatus nActionResult = RemoveStreamImpl(strName);
	if (nActionResult != XN_STATUS_OK)
	{
		xnLogWarning(XN_MASK_SENSOR_SERVER, "Client %u failed to remove stream '%s': %s", m_nID, strName, xnGetStatusString(nActionResult));
	}

	nRetVal = SendReply(XN_SENSOR_SERVER_MESSAGE_GENERAL_OP_RESPOND, nActionResult);
	XN_IS_STATUS_OK(nRetVal);

	return (XN_STATUS_OK);
}
Example #11
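Sensor-server handler: reads a stream name and closes that stream, replying with the operation's result.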
XnStatus XnServerSession::HandleCloseStream()
{
	XnStatus nRetVal = XN_STATUS_OK;

	// read it
	XnChar strStreamName[XN_DEVICE_MAX_STRING_LENGTH];
	XnUInt32 nDataSize = XN_DEVICE_MAX_STRING_LENGTH;
	XnUInt32 nType = XN_SENSOR_SERVER_MESSAGE_CLOSE_STREAM;
	nRetVal = m_privateIncomingPacker.ReadCustomData(nType, strStreamName, &nDataSize);
	XN_IS_STATUS_OK(nRetVal);

	XnStatus nActionResult = CloseStreamImpl(strStreamName);
	if (nActionResult != XN_STATUS_OK)
	{
		xnLogWarning(XN_MASK_SENSOR_SERVER, "Client %u failed to close stream '%s': %s", m_nID, strStreamName, xnGetStatusString(nActionResult));
	}

	nRetVal = SendReply(XN_SENSOR_SERVER_MESSAGE_GENERAL_OP_RESPOND, nActionResult);
	XN_IS_STATUS_OK(nRetVal);

	return (XN_STATUS_OK);
}
Example #12
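Sensor-server handler: reads a new-stream request (type, name, and optional initial properties) and creates the stream.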
XnStatus XnServerSession::HandleNewStream()
{
	XnStatus nRetVal = XN_STATUS_OK;

	// read it
	XN_PROPERTY_SET_CREATE_ON_STACK(props);

	XnChar strType[XN_DEVICE_MAX_STRING_LENGTH];
	XnChar strName[XN_DEVICE_MAX_STRING_LENGTH];
	nRetVal = m_privateIncomingPacker.ReadNewStream(strType, strName, &props);
	XN_IS_STATUS_OK(nRetVal);

	XnPropertySet* pInitialValues = &props;
	if (props.pData->Begin() == props.pData->End())
	{
		pInitialValues = NULL;
	}

	XnStatus nActionResult = NewStreamImpl(strType, strName, pInitialValues);
	if (nActionResult != XN_STATUS_OK)
	{
		xnLogWarning(XN_MASK_SENSOR_SERVER, "Client %u failed to create stream of type '%s': %s", m_nID, strType, xnGetStatusString(nActionResult));
	}

	nRetVal = SendReply(XN_SENSOR_SERVER_MESSAGE_GENERAL_OP_RESPOND, nActionResult);
	XN_IS_STATUS_OK(nRetVal);

	return (XN_STATUS_OK);
}
Example #13
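Sensor-server handler: reads an INI file name and section from the client and configures the sensor from them.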
XnStatus XnServerSession::HandleConfigFromINIFile()
{
	XnStatus nRetVal = XN_STATUS_OK;

	// read it
	XnSensorServerMessageIniFile message;
	XnUInt32 nDataSize = sizeof(message);
	nRetVal = m_privateIncomingPacker.ReadCustomData(XN_SENSOR_SERVER_MESSAGE_INI_FILE, (XnUChar*)&message, &nDataSize);
	XN_IS_STATUS_OK(nRetVal);

	if (nDataSize != sizeof(message))
	{
		XN_LOG_WARNING_RETURN(XN_STATUS_ERROR, XN_MASK_SENSOR_SERVER, "Sensor server protocol error - invalid size!");
	}

	// load
	XnStatus nActionResult = ConfigFromINIFileImpl(message.strFileName, message.strSectionName);
	if (nActionResult != XN_STATUS_OK)
	{
		xnLogWarning(XN_MASK_SENSOR_SERVER, "Client %u failed to config sensor from file '%s': %s", m_nID, message.strFileName, xnGetStatusString(nActionResult));
	}

	nRetVal = SendReply(XN_SENSOR_SERVER_MESSAGE_GENERAL_OP_RESPOND, nActionResult);
	XN_IS_STATUS_OK(nRetVal);

	return (XN_STATUS_OK);
}
Example #14
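Sensor-server handler for general (variable-size) properties: the request header and the value buffer share one reply-sized buffer.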
XnStatus XnServerSession::HandleGetGeneralProperty()
{
	XnStatus nRetVal = XN_STATUS_OK;

	// read it
	XnUChar bufValue[XN_SENSOR_SERVER_MAX_REPLY_SIZE];
	XnSensorServerMessageGetPropertyRequest* pRequest = (XnSensorServerMessageGetPropertyRequest*)bufValue;
	XnUChar* pData = bufValue + sizeof(XnSensorServerMessageGetPropertyRequest);
	XnUInt32 nDataSize = XN_SENSOR_SERVER_MAX_REPLY_SIZE;
	nRetVal = m_privateIncomingPacker.ReadCustomData(XN_SENSOR_SERVER_MESSAGE_GET_GENERAL_PROPERTY, bufValue, &nDataSize);
	XN_IS_STATUS_OK(nRetVal);

	if (nDataSize < sizeof(XnSensorServerMessageGetPropertyRequest))
	{
		XN_LOG_WARNING_RETURN(XN_STATUS_ERROR, XN_MASK_SENSOR_SERVER, "Sensor server protocol error - invalid size!");
	}

	// get
	XnGeneralBuffer gbValue = XnGeneralBufferPack(pData, pRequest->nSize);
	XnStatus nActionResult = GetGeneralPropertyImpl(pRequest->strModuleName, pRequest->strPropertyName, gbValue);
	if (nActionResult != XN_STATUS_OK)
	{
		xnLogWarning(XN_MASK_SENSOR_SERVER, "Client %u failed to get property '%s.%s': %s", m_nID, pRequest->strModuleName, pRequest->strPropertyName, xnGetStatusString(nActionResult));
	}

	nRetVal = SendReply(XN_SENSOR_SERVER_MESSAGE_GET_GENERAL_PROPERTY, nActionResult, pRequest->nSize, pData);
	XN_IS_STATUS_OK(nRetVal);

	return (XN_STATUS_OK);
}
Example #15
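Enumerates the supported image and depth output modes, each under its own lock, and caches them in member vectors.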
void OpenNIDevice::getAvailableModes () throw (OpenNIException)
{
  available_image_modes_.clear ();
  unique_lock<mutex> image_lock (image_mutex_);
  unsigned mode_count = image_generator_.GetSupportedMapOutputModesCount ();
  XnMapOutputMode* modes = new XnMapOutputMode[mode_count];
  XnStatus status = image_generator_.GetSupportedMapOutputModes (modes, mode_count);
  if (status != XN_STATUS_OK)
  {
    delete[] modes;
    THROW_OPENNI_EXCEPTION ("Could not enumerate image stream output modes. Reason: %s", xnGetStatusString (status));
  }
  image_lock.unlock ();

  for (unsigned modeIdx = 0; modeIdx < mode_count; ++modeIdx)
    available_image_modes_.push_back (modes[modeIdx]);
  delete[] modes;

  available_depth_modes_.clear ();
  unique_lock<mutex> depth_lock (depth_mutex_);
  mode_count = depth_generator_.GetSupportedMapOutputModesCount ();
  modes = new XnMapOutputMode[mode_count];
  status = depth_generator_.GetSupportedMapOutputModes (modes, mode_count);
  if (status != XN_STATUS_OK)
  {
    delete[] modes;
    THROW_OPENNI_EXCEPTION ("Could not enumerate depth stream output modes. Reason: %s", xnGetStatusString (status));
  }
  depth_lock.unlock ();

  for (unsigned modeIdx = 0; modeIdx < mode_count; ++modeIdx)
    available_depth_modes_.push_back (modes[modeIdx]);
  delete[] modes;
}
Example #16
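Android wrapper start-up: initializes the context from XML, looks for depth and image nodes, enables depth registration, and allocates the frame buffers.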
int OpenniWrapper::start()
{
	XnStatus rc;
	EnumerationErrors errors;
	hasRGB = 0;
	hasDepth = 0;

	rc = g_context.InitFromXmlFile(SAMPLE_XML_PATH, g_scriptNode, &errors);
	if (rc == XN_STATUS_NO_NODE_PRESENT)
	{
		XnChar strError[1024];
		errors.ToString(strError, 1024);
		sprintf(buf, "%s\n", strError);
		__android_log_print(ANDROID_LOG_DEBUG, "OPENNI",  buf);
		return (rc);
	}
	else if (rc != XN_STATUS_OK)
	{
		sprintf(buf, "Open failed: %s\n", xnGetStatusString(rc));
		__android_log_print(ANDROID_LOG_DEBUG, "OPENNI",  buf);
		return (rc);
	}

	rc = g_context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_depth);
	hasDepth = 1;
	if (rc != XN_STATUS_OK)
	{
		sprintf(buf, "No depth node exists! Check your XML.");
		__android_log_print(ANDROID_LOG_DEBUG, "OPENNI",  buf);
		hasDepth = 0;
		//return 1;
	}

	rc = g_context.FindExistingNode(XN_NODE_TYPE_IMAGE, g_image);
	hasRGB=1;
	if (rc != XN_STATUS_OK)
	{
		sprintf(buf, "No image node exists! Check your XML.");
		__android_log_print(ANDROID_LOG_DEBUG, "OPENNI",  buf);
		hasRGB=0;
		//return 1;
	}

//	rc = g_depth.SetIntProperty ("OutputFormat", 0);
//	if (rc != XN_STATUS_OK)
//	{
//		sprintf(buf, "Cannot set depth generator property");
//		__android_log_print(ANDROID_LOG_DEBUG, "OPENNI",  buf);
//		return 1;
//	}
	//for ASUS XTION ONLY
	//see: http://dev.pointclouds.org/projects/pcl/wiki/MacOSX
	rc = g_depth.SetIntProperty ("RegistrationType", 1);
	if (rc != XN_STATUS_OK)
	{
		sprintf(buf, "Cannot set depth generator property: RegistrationType");
		__android_log_print(ANDROID_LOG_DEBUG, "OPENNI",  buf);
		return 1;
	}

	//obviously Kinect doesn't support anything else!?
//	XnMapOutputMode outputMode;
//
//	outputMode.nXRes = 640;
//	outputMode.nYRes = 480;
//	outputMode.nFPS = 30;
////
//	rc = g_depth.SetMapOutputMode(outputMode);
//	if (rc != XN_STATUS_OK)
//	{
//		sprintf(buf, "Cannot set depth generator property");
//		__android_log_print(ANDROID_LOG_DEBUG, "OPENNI",  buf);
//		return 1;
//	}
////
//	rc = g_image.SetMapOutputMode(outputMode);
//	if (rc != XN_STATUS_OK)
//	{
//		sprintf(buf, "Cannot set image generator property");
//		__android_log_print(ANDROID_LOG_DEBUG, "OPENNI",  buf);
//		return 1;
//	}
	// TODO: check error code
	if(hasDepth)
		g_depth.GetMetaData(g_depthMD);
	if(hasRGB)
		g_image.GetMetaData(g_imageMD);

	// Hybrid mode isn't supported in this sample
//	if (g_imageMD.FullXRes() != g_depthMD.FullXRes() || g_imageMD.FullYRes() != g_depthMD.FullYRes())
//	{
//		sprintf (buf, "The device depth and image resolution must be equal!\n");
//		__android_log_print(ANDROID_LOG_DEBUG, "OPENNI",  buf);
//		return 1;
//	}

	//show some info of the device...
//	sprintf(buf, "Image Resolution: %d x %d", g_imageMD.FullXRes(), g_imageMD.FullYRes());
//	__android_log_print(ANDROID_LOG_DEBUG, "OPENNI",  buf);

//	// RGB is the only image format supported.
//	if (g_imageMD.PixelFormat() != XN_PIXEL_FORMAT_RGB24)
//	{
//		sprintf(buf, "The device image format must be RGB24\n");
//		__android_log_print(ANDROID_LOG_DEBUG, "OPENNI",  buf);
//		return 1;
//	}

	//configure the global variable
	width  = g_depthMD.FullXRes();
	height  = g_depthMD.FullYRes();

	//g_image.StopGenerating();

	//init the tmp storage for the frames
	rgbImage = (unsigned char*)malloc(width*height*3*sizeof(unsigned char));
	pDepth = (unsigned short*)malloc(width*height*sizeof(unsigned short));

	//this will map the depth map to the rgb image by default. Of course we can turn this off
	//if we would like to process the depth map independently.
	// TODO: turning this off causes the RGB image to corrupt. Why?
	if(g_depth.GetAlternativeViewPointCap().SetViewPoint(g_image)!=XN_STATUS_OK)
	{
		sprintf(buf, "Cannot set GetAlternativeViewPointCap() ");
		__android_log_print(ANDROID_LOG_DEBUG, "OPENNI",  buf);
	}
	if(g_image.GetAlternativeViewPointCap().SetViewPoint(g_depth)!=XN_STATUS_OK){
		sprintf(buf, "Cannot set GetAlternativeViewPointCap() for g_image ");
		__android_log_print(ANDROID_LOG_DEBUG, "OPENNI",  buf);
	}
	//show some info of the device...
	sprintf(buf, "Finished OpenNI Initialization");
	__android_log_print(ANDROID_LOG_DEBUG, "OPENNI",  buf);
	return 0;
}
Example #17
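Thread-safe setter for the depth stream's output mode; throws an OpenNIException on failure.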
void OpenNIDevice::setDepthOutputMode (const XnMapOutputMode& output_mode) throw (OpenNIException)
{
  lock_guard<mutex> depth_lock (depth_mutex_);
  XnStatus status = depth_generator_.SetMapOutputMode (output_mode);
  if (status != XN_STATUS_OK)
    THROW_OPENNI_EXCEPTION ("Could not set depth stream output mode to %dx%d@%d. Reason: %s", output_mode.nXRes, output_mode.nYRes, output_mode.nFPS, xnGetStatusString (status));
}
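A minimal call sketch for context (hypothetical caller; assumes an initialized OpenNIDevice named device and the XnMapOutputMode layout shown in Example #5):

  XnMapOutputMode mode;
  mode.nXRes = 640;                 // width in pixels
  mode.nYRes = 480;                 // height in pixels
  mode.nFPS  = 30;                  // frames per second
  device.setDepthOutputMode(mode);  // throws OpenNIException on failure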
Example #18
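NITE circle-detector sample: initializes a session manager with the Wave and RaiseHand gestures, registers circle callbacks, and enters the GL loop.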
int main(int argc, char ** argv)
{
    XnStatus rc = XN_STATUS_OK;
    xn::EnumerationErrors errors;

    // Configure
    rc = g_Context.InitFromXmlFile(SAMPLE_XML_FILE, g_ScriptNode, &errors);
    if (rc == XN_STATUS_NO_NODE_PRESENT)
    {
        XnChar strError[1024];
        errors.ToString(strError, 1024);
        printf("%s\n", strError);
        return (rc);
    }
    if (rc != XN_STATUS_OK)
    {
        printf("Couldn't initialize from file: %s\n", xnGetStatusString(rc));
        return 1;
    }

    // Create and initialize point tracker
    g_pSessionManager = new XnVSessionManager();
    rc = g_pSessionManager->Initialize(&g_Context, "Wave", "RaiseHand");
    if (rc != XN_STATUS_OK)
    {
        printf("Couldn't initialize the Session Manager: %s\n", xnGetStatusString(rc));
        CleanupExit();
    }

    g_pSessionManager->RegisterSession(NULL, &SessionStart, &SessionEnd);

    // Start catching signals for quit indications
    CatchSignals(&g_bQuit);

    // init and register circle control
    g_pCircle = new XnVCircleDetector;
    g_pCircle->RegisterCircle(NULL, &CircleCB);
    g_pCircle->RegisterNoCircle(NULL, &NoCircleCB);
    g_pCircle->RegisterPrimaryPointCreate(NULL, &Circle_PrimaryCreate);
    g_pCircle->RegisterPrimaryPointDestroy(NULL, &Circle_PrimaryDestroy);
    g_pSessionManager->AddListener(g_pCircle);

    SetCircle(true, 0);
    SetCircleColor(1,1,1);
    SetCircleLineColor(0.7,0.7,0.7);

    g_Context.StartGeneratingAll();

#ifdef USE_GLUT

    glInit(&argc, argv);
    glutMainLoop();

#else

    if (!opengles_init(GL_WIN_SIZE_X, GL_WIN_SIZE_Y, &display, &surface, &context))
    {
        printf("Error initing opengles\n");
        CleanupExit();
    }

    glDisable(GL_DEPTH_TEST);
    //glEnable(GL_TEXTURE_2D);

    glEnableClientState(GL_VERTEX_ARRAY);
    glDisableClientState(GL_COLOR_ARRAY);

    while ((!_kbhit()) && (!g_bQuit))
    {
        glutDisplay();
    }

    opengles_shutdown(display, surface, context);

    CleanupExit();

#endif
}
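
The session and circle handlers registered above are defined elsewhere in the sample. For reference, in the stock NITE samples their signatures look roughly like this (a sketch, not code from this program):

void XN_CALLBACK_TYPE SessionStart(const XnPoint3D& ptPosition, void* UserCxt)
{
    // hand point tracking has started at ptPosition
}
void XN_CALLBACK_TYPE SessionEnd(void* UserCxt)
{
    // hand point tracking has stopped
}
void XN_CALLBACK_TYPE CircleCB(XnFloat fTimes, XnBool bConfident, const XnVCircle* pCircle, void* pUserCxt)
{
    // fTimes accumulates the number of circles drawn so far
}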
Пример #19
0
void xnLogConfigurationChanged()
{
	// if new configuration requires a log file, and we don't have one opened
	if (g_xnLoggerData.m_nLogFilteringType != XN_LOG_WRITE_NONE && g_xnLoggerData.m_bWriteToFile && g_xnLoggerData.m_fLogFile == XN_INVALID_FILE_HANDLE)
	{
		XN_PROCESS_ID nProcID = 0;
		xnOSGetCurrentProcessID(&nProcID);
		XnChar strFileName[XN_FILE_MAX_PATH];
		sprintf(strFileName, "%s%s_%u.log", g_xnLoggerData.m_csLogDir, g_xnLoggerData.m_csTime, nProcID);
		XnStatus nRetVal = xnLogCreateFile(strFileName, &g_xnLoggerData.m_fLogFile);
		if (nRetVal != XN_STATUS_OK)
		{
			// there isn't much we can do if the file can't be opened; logs will not be written to file
			printf("Couldn't create log file %s! Logs will not be written (error: %s)\n", strFileName, xnGetStatusString(nRetVal));
			g_xnLoggerData.m_fLogFile = XN_INVALID_FILE_HANDLE;
			g_xnLoggerData.m_bWriteToFile = FALSE;
		}
	}

	if (!g_xnLoggerData.m_bBannerPrinted && xnLogIsEnabled(XN_MASK_LOG, XN_LOG_INFO))
	{
		xnLogInfo(XN_MASK_LOG, "OpenNI version is %s", XN_VERSION_STRING);
		g_xnLoggerData.m_bBannerPrinted = TRUE;
	}

	if (g_xnLoggerData.m_fLogFile != XN_INVALID_FILE_HANDLE)
	{
		XnChar csMasks[XN_LOG_MASKS_STRING_LEN];
		xnLogGetMasksString(csMasks);
		xnLogWriteImpl(XN_MASK_LOG, XN_LOG_INFO, __FILE__, __LINE__, "Log system initialized. Console: %d, File: %d, Severity: %s, Masks: %s",
			g_xnLoggerData.m_bWriteToConsole, g_xnLoggerData.m_bWriteToFile, xnLogGetSeverityString(g_xnLoggerData.m_nFilterSeverity), csMasks);
	}
}
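
For reference, the state this function reacts to is normally set through OpenNI's public log API; a minimal sketch of enabling both outputs (OpenNI 1.x names):

	xnLogInitSystem();
	xnLogSetConsoleOutput(TRUE);
	xnLogSetFileOutput(TRUE);                 // takes the file-creation path above
	xnLogSetSeverityFilter(XN_LOG_WARNING);
	xnLogSetMaskState(XN_LOG_MASK_ALL, TRUE);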
Пример #20
0
int main ( int argc, char * argv[] )
{
	// 
	// Initialize OpenNI Settings
	// 

	XnStatus nRetVal = XN_STATUS_OK;
	xn::ScriptNode scriptNode;
	xn::EnumerationErrors errors;

	// 
	// Initialize Context Object
	// 

	nRetVal = g_Context.InitFromXmlFile ( CONFIG_XML_PATH, scriptNode, &errors );
	
	if ( nRetVal == XN_STATUS_NO_NODE_PRESENT ) {
		XnChar strError[1024];
		errors.ToString(strError, 1024);
		printf ( "XN_STATUS_NO_NODE_PRESENT:\n%s\n", strError );
		system ( "pause" );

		return ( nRetVal );
	}
	else if ( nRetVal != XN_STATUS_OK ) {
		printf ( "Open failed: %s\n", xnGetStatusString(nRetVal) );	
		system ( "pause" );

		return ( nRetVal );
	}

	// 
	// Handle Image & Depth Generator Node
	// 

	bool colorFlag = true;
	bool depthFlag = true;

	nRetVal = g_Context.FindExistingNode ( XN_NODE_TYPE_DEPTH, g_DepthGen );
	if ( nRetVal != XN_STATUS_OK ) {
		printf("No depth node exists!\n");
		depthFlag = false;
	}
	nRetVal = g_Context.FindExistingNode ( XN_NODE_TYPE_IMAGE, g_ImageGen );
	if ( nRetVal != XN_STATUS_OK ) {
		printf("No image node exists!\n");
		colorFlag = false;
	}

	// g_DepthGen.GetAlternativeViewPointCap().SetViewPoint( g_ImageGen );

	if ( depthFlag ) {
		g_DepthGen.GetMetaData ( g_DepthMD );
		assert ( g_DepthMD.PixelFormat() == XN_PIXEL_FORMAT_GRAYSCALE_16_BIT );
	}
	if ( colorFlag ) {
		g_ImageGen.GetMetaData ( g_ImageMD );
		assert ( g_ImageMD.PixelFormat() == XN_PIXEL_FORMAT_RGB24 );
	}

	g_DepthImgShow = cv::Mat ( g_DepthMD.YRes(), g_DepthMD.XRes(), CV_8UC1  );
	g_DepthImgMat  = cv::Mat ( g_DepthMD.YRes(), g_DepthMD.XRes(), CV_16UC1 );
	g_ColorImgMat  = cv::Mat ( g_ImageMD.YRes(), g_ImageMD.XRes(), CV_8UC3  );
	
	// 
	// Start to Loop
	// 

	bool flipColor = true;
	int ctlWndKey = -1;

	g_StartTickCount = GetTickCount();
	g_HeadTrackingFrameCount = 0;
	
	while ( ctlWndKey != ESC_KEY_VALUE ) 
	{
		nRetVal = g_Context.WaitOneUpdateAll ( g_DepthGen );
		// nRetVal = g_Context.WaitAnyUpdateAll();

#ifdef HANDLING_IMAGE_DATA

		if ( colorFlag ) 
		{
			g_ImageGen.GetMetaData ( g_ImageMD );

			assert ( g_ImageMD.FullXRes() == g_ImageMD.XRes() );
			assert ( g_ImageMD.FullYRes() == g_ImageMD.YRes() );

			GlobalUtility::CopyColorRawBufToCvMat8uc3 ( (const XnRGB24Pixel *)(g_ImageMD.Data()), g_ColorImgMat );
	
			if ( ctlWndKey == 's' || ctlWndKey == 'S' ) {												// Switch
				flipColor = !flipColor;
			}
			if ( flipColor ) {
				cv::cvtColor ( g_ColorImgMat, g_ColorImgMat, CV_RGB2BGR );
			}

			cv::namedWindow ( IMAGE_WIN_NAME, CV_WINDOW_AUTOSIZE );
			cv::imshow ( IMAGE_WIN_NAME, g_ColorImgMat );
		}

#endif

#ifdef HANDLING_DEPTH_DATA

		if ( depthFlag ) 
		{
			g_DepthGen.GetMetaData(g_DepthMD);
		
			// assert ( g_DepthMD.FullXRes() == g_DepthMD.XRes() );
			// assert ( g_DepthMD.FullYRes() == g_DepthMD.YRes() );

			GlobalUtility::CopyDepthRawBufToCvMat16u ( (const XnDepthPixel *)(g_DepthMD.Data()), g_DepthImgMat );
			GlobalUtility::ConvertDepthCvMat16uToGrayCvMat ( g_DepthImgMat, g_DepthImgShow );

			/*
			cv::putText(colorImgMat, 
						GlobalUtility::DoubleToString(g_ImageMD.FPS()) + " FPS", 
						cv::Point(10, 450), 
						cv::FONT_ITALIC, 
						0.7, 
						cv::Scalar(255, 255, 255, 0),
						2,
						8,
						false);
						*/

			cv::namedWindow ( DEPTH_WIN_NAME, CV_WINDOW_AUTOSIZE );
			cv::imshow ( DEPTH_WIN_NAME, g_DepthImgShow );
		}

#endif

		XnFieldOfView fov;
		g_DepthGen.GetFieldOfView( fov );

		std::cout	<< "HFov = " << fov.fHFOV << std::endl
					<< "VFov = " << fov.fVFOV << std::endl;

		ctlWndKey = cvWaitKey ( 5 );

		g_HeadTrackingFrameCount++;
		g_CurrTickCount = GetTickCount();
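		// average FPS over the whole run: frames * 1000 / elapsed ms; a sliding
		// window would track the instantaneous rate more closely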
		std::cout	<< "FPS = " 
					<< 1000 / ( ( double )( g_CurrTickCount - g_StartTickCount ) / ( double )( g_HeadTrackingFrameCount ) ) 
					<< std::endl;
	}

	g_Context.Release ();

	exit ( EXIT_SUCCESS );
}
Пример #21
0
int main(int argc, char **argv)
{
   sleep(10);
   ros::init(argc, argv, "skel_tracker");
   ros::NodeHandle nh_;

   // Read the device_id parameter from the server
   int device_id;
//   param_nh.param ("device_id", device_id, argc > 1 ? atoi (argv[1]) : 0);

   pmap_pub = nh_.advertise<mapping_msgs::PolygonalMap> ("skeletonpmaps", 100);
   skel_pub = nh_.advertise<body_msgs::Skeletons> ("skeletons", 100);
	XnStatus nRetVal = XN_STATUS_OK;

	if (argc > 1)
	{
		nRetVal = g_Context.Init();
		CHECK_RC(nRetVal, "Init");
		nRetVal = g_Context.OpenFileRecording(argv[1]);
		if (nRetVal != XN_STATUS_OK)
		{
			printf("Can't open recording %s: %s\n", argv[1], xnGetStatusString(nRetVal));
			return 1;
		}
	}
	else
	{

	   std::string configFilename = ros::package::getPath("openni_tracker") + "/openni_tracker.xml";
		nRetVal = g_Context.InitFromXmlFile(configFilename.c_str());
		CHECK_RC(nRetVal, "InitFromXml");
	}

	nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_DepthGenerator);
	CHECK_RC(nRetVal, "Find depth generator");
	nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_USER, g_UserGenerator);
	if (nRetVal != XN_STATUS_OK)
	{
		nRetVal = g_UserGenerator.Create(g_Context);
		CHECK_RC(nRetVal, "Find user generator");
	}

	XnCallbackHandle hUserCallbacks, hCalibrationCallbacks, hPoseCallbacks;
	if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_SKELETON))
	{
		printf("Supplied user generator doesn't support skeleton\n");
		return 1;
	}
	g_UserGenerator.RegisterUserCallbacks(User_NewUser, User_LostUser, NULL, hUserCallbacks);
	g_UserGenerator.GetSkeletonCap().RegisterCalibrationCallbacks(UserCalibration_CalibrationStart, UserCalibration_CalibrationEnd, NULL, hCalibrationCallbacks);

	if (g_UserGenerator.GetSkeletonCap().NeedPoseForCalibration())
	{
		g_bNeedPose = TRUE;
		if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION))
		{
			printf("Pose required, but not supported\n");
			return 1;
		}
		g_UserGenerator.GetPoseDetectionCap().RegisterToPoseCallbacks(UserPose_PoseDetected, NULL, NULL, hPoseCallbacks);
		g_UserGenerator.GetSkeletonCap().GetCalibrationPose(g_strPose);
	}

	g_UserGenerator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);

	nRetVal = g_Context.StartGeneratingAll();
	CHECK_RC(nRetVal, "StartGenerating");

	glInit(&argc, argv);
	glutMainLoop();

}
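
CHECK_RC is not defined in this snippet; in the stock OpenNI samples it is the usual early-exit guard, roughly:

#define CHECK_RC(nRetVal, what)                                         \
	if (nRetVal != XN_STATUS_OK)                                        \
	{                                                                   \
		printf("%s failed: %s\n", what, xnGetStatusString(nRetVal));    \
		return nRetVal;                                                 \
	}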
Пример #22
0
int main (int argc, char * argv[])
{
  IplImage* camera = 0;

  try {
    // Create the context
    xn::Context context;
    XnStatus rc = context.InitFromXmlFile(CONFIG_XML_PATH);
    if (rc != XN_STATUS_OK) {
      throw std::runtime_error(xnGetStatusString(rc));
    }

    // Create the image generator
    xn::ImageGenerator image;
    rc = context.FindExistingNode(XN_NODE_TYPE_IMAGE, image);
    if (rc != XN_STATUS_OK) {
      throw std::runtime_error(xnGetStatusString(rc));
    }

    // Create the depth generator
    xn::DepthGenerator depth;
    rc = context.FindExistingNode(XN_NODE_TYPE_DEPTH, depth);
    if (rc != XN_STATUS_OK) {
      throw std::runtime_error(xnGetStatusString(rc));
    }

    // Align the depth coordinates with the image
    xn::AlternativeViewPointCapability viewPoint =
                                depth.GetAlternativeViewPointCap();
    viewPoint.SetViewPoint(image);

    // Create a camera-sized image (8-bit RGB)
    xn::ImageMetaData imageMD;
    image.GetMetaData(imageMD);
    camera = ::cvCreateImage(cvSize(imageMD.XRes(), imageMD.YRes()),
      IPL_DEPTH_8U, 3);
    if (!camera) {
      throw std::runtime_error("error : cvCreateImage");
    }

    // Main loop
    while (1) {
      // Wait for all updates, then grab the image and depth data
      context.WaitAndUpdateAll();

      xn::ImageMetaData imageMD;
      image.GetMetaData(imageMD);

      xn::DepthMetaData depthMD;
      depth.GetMetaData(depthMD);

      // Build the depth histogram
      depth_hist depthHist = getDepthHistgram(depth, depthMD);

      // Don't draw anything beyond a fixed distance: paint it white
      xn::RGB24Map& rgb = imageMD.WritableRGB24Map();
      for (XnUInt y = 0; y < imageMD.YRes(); ++y) {
        for (XnUInt x = 0; x < imageMD.XRes(); ++x) {
          const XnDepthPixel& depth = depthMD(x, y);
          if (depth > 1000) {
            XnRGB24Pixel& pixel = rgb(x, y);
            pixel.nRed   = 255;
            pixel.nGreen = 255;
            pixel.nBlue  = 255;
          }
        }
      }

      // Display the image
      memcpy(camera->imageData, imageMD.RGB24Data(), camera->imageSize);
      ::cvCvtColor(camera, camera, CV_RGB2BGR);
      ::cvShowImage("KinectImage", camera);

      // Quit
      char key = cvWaitKey(10);
      if (key == 'q') {
        break;
      }
    }
  }
  catch (std::exception& ex) {
    std::cout << ex.what() << std::endl;
  }

  ::cvReleaseImage(&camera);

  return 0;
}
Пример #23
0
void XnPSCompressedDepthProcessor::ProcessFramePacketChunk(const XnSensorProtocolResponseHeader* pHeader, const XnUChar* pData, XnUInt32 nDataOffset, XnUInt32 nDataSize)
{
	XN_PROFILING_START_SECTION("XnPSCompressedDepthProcessor::ProcessFramePacketChunk")

	XnBuffer* pWriteBuffer = GetWriteBuffer();

	const XnUChar* pBuf = NULL;
	XnUInt32 nBufSize = 0;

	// check if we have bytes stored from previous calls
	if (m_RawData.GetSize() > 0)
	{
		// we have no choice. We need to append current buffer to previous bytes
		if (m_RawData.GetFreeSpaceInBuffer() < nDataSize)
		{
			xnLogWarning(XN_MASK_SENSOR_PROTOCOL_DEPTH, "Bad overflow depth! %d", m_RawData.GetSize());
			FrameIsCorrupted();
		}
		else
		{
			m_RawData.UnsafeWrite(pData, nDataSize);
		}

		pBuf = m_RawData.GetData();
		nBufSize = m_RawData.GetSize();
	}
	else
	{
		// we can process the data directly
		pBuf = pData;
		nBufSize = nDataSize;
	}

	XnUInt32 nOutputSize = pWriteBuffer->GetFreeSpaceInBuffer();
	XnUInt32 nWrittenOutput = nOutputSize;
	XnUInt32 nActualRead = 0;
	XnBool bLastPart = pHeader->nType == XN_SENSOR_PROTOCOL_RESPONSE_DEPTH_END && (nDataOffset + nDataSize) == pHeader->nBufSize;
	XnStatus nRetVal = UncompressDepthPS(pBuf, nBufSize, (XnUInt16*)pWriteBuffer->GetUnsafeWritePointer(), 
		&nWrittenOutput, &nActualRead, bLastPart);

	if (nRetVal != XN_STATUS_OK)
	{
		FrameIsCorrupted();

		static XnUInt64 nLastPrinted = 0;

		XnUInt64 nCurrTime;
		xnOSGetTimeStamp(&nCurrTime);

		if (nOutputSize != 0 || (nCurrTime - nLastPrinted) > 1000) 
		{
			xnLogWarning(XN_MASK_SENSOR_PROTOCOL_DEPTH, "Uncompress depth failed: %s. Input Size: %u, Output Space: %u, Last Part: %d.", xnGetStatusString(nRetVal), nBufSize, nOutputSize, bLastPart);

			xnOSGetTimeStamp(&nLastPrinted);
		}
	}

	pWriteBuffer->UnsafeUpdateSize(nWrittenOutput);

	nBufSize -= nActualRead;
	m_RawData.Reset();

	// if we have any bytes left, keep them for next time
	if (nBufSize > 0)
	{
		pBuf += nActualRead;
		m_RawData.UnsafeWrite(pBuf, nBufSize);
	}

	XN_PROFILING_END_SECTION
}
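
The carry-over buffering above is the general pattern for decoding a stream that arrives in arbitrary packet-sized chunks. A self-contained sketch with hypothetical names (Decode stands in for UncompressDepthPS and returns the number of bytes it consumed; overflow checks omitted):

#include <cstring>

unsigned int Decode(const unsigned char* p, unsigned int n); // hypothetical decoder

struct ChunkCarry
{
	unsigned char buf[4096];
	unsigned int  size;
};

void ProcessChunk(ChunkCarry& carry, const unsigned char* pData, unsigned int nSize)
{
	const unsigned char* p = pData;
	unsigned int n = nSize;
	if (carry.size > 0)
	{
		// prepend the bytes kept from the previous packet
		memcpy(carry.buf + carry.size, pData, nSize);
		carry.size += nSize;
		p = carry.buf;
		n = carry.size;
	}
	unsigned int consumed = Decode(p, n);
	// keep any undecoded tail for the next call (memmove: p may alias buf)
	memmove(carry.buf, p + consumed, n - consumed);
	carry.size = n - consumed;
}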
Пример #24
0
int Init()
{
	
	XnStatus rc;

	//Make sure our image types are the same as the OpenNI image types.
	assert(sizeof(XnRGB24Pixel) == sizeof(ColorPixel));
	assert(sizeof(XnDepthPixel) == sizeof(DepthPixel));
	assert(sizeof(XnStatus) == sizeof(int));

	// Load OpenNI xml settings
	char filePath[255];
	int length = Util::Helpers::GetExeDirectory(filePath, sizeof(filePath));
	filePath[length] = '\\';
	strcpy(&filePath[length+1], SAMPLE_XML_PATH);

	EnumerationErrors errors;
	rc = deviceContext.InitFromXmlFile(filePath, &errors);
	if (rc == XN_STATUS_NO_NODE_PRESENT)
	{
		//One possible cause: the Microsoft Kinect SDK is installed alongside the PrimeSense drivers.
		//Device Manager should then list the device as PrimeSense rather than Microsoft Kinect.
		
		//XnChar strError[1024];
		//errors.ToString(strError, 1024);
		//LOGE("%s\n", strError);
		return -1;
	}
	else if (rc != XN_STATUS_OK)
	{
		fprintf(stderr, "%s\n", xnGetStatusString(rc));
		/*LOGE("Open failed: %s\n", xnGetStatusString(rc));*/
		return (rc);
	}

	// Retrieve colour and depth nodes
	rc = deviceContext.FindExistingNode(XN_NODE_TYPE_IMAGE, colorImageGenerator);
	rc = deviceContext.FindExistingNode(XN_NODE_TYPE_DEPTH, depthImageGenerator);

	// Set mirror mode to off
	SetMirrorMode(false);

	// Get a frame to perform checks on it
	ImageMetaData colorImageMetaData;
	DepthMetaData depthImageMetaData;
	depthImageGenerator.GetMetaData(depthImageMetaData);
	colorImageGenerator.GetMetaData(colorImageMetaData);

	// Hybrid mode isn't supported in this sample
	if (colorImageMetaData.FullXRes() != depthImageMetaData.FullXRes() || colorImageMetaData.FullYRes() != depthImageMetaData.FullYRes())
	{
		/*LOGE("The device depth and image resolution must be equal!\n");*/
		return 1;
	}

	// RGB is the only image format supported.
	if (colorImageMetaData.PixelFormat() != XN_PIXEL_FORMAT_RGB24)
	{
		/*LOGE("The device image format must be RGB24\n");*/
		return 1;
	}
	
	// Need to make sure the automatic alignment of colour and depth images is supported.
	XnBool isSupported = depthImageGenerator.IsCapabilitySupported("AlternativeViewPoint");
	if(!isSupported)
	{
		/*LOGE("Cannot set AlternativeViewPoint!\n");*/
		return 1;
	}

	
	// Set it to VGA maps at 30 FPS
	/*XnMapOutputMode mapMode;
	mapMode.nXRes = XN_VGA_X_RES;
	mapMode.nYRes = XN_VGA_Y_RES;
	mapMode.nFPS = 60;
	rc = g_depth.SetMapOutputMode(mapMode);
	if(rc)
	{
		LOGE("Failed to set depth map mode: %s\n", xnGetStatusString(rc));
		return 1;
	}
	mapMode.nFPS = 30;
	rc = g_image.SetMapOutputMode(mapMode);
	if(rc)
	{
		LOGE("Failed to set image map mode: %s\n", xnGetStatusString(rc));
		return 1;
	}*/


	// Set automatic alignment of the colour and depth images.
	rc = depthImageGenerator.GetAlternativeViewPointCap().SetViewPoint(colorImageGenerator);
	if (rc != XN_STATUS_OK)
	{
		/*LOGE("Failed to set the alternative view point: %s\n", xnGetStatusString(rc));*/
		return 1;
	}


	return XN_STATUS_OK;
}
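
A follow-up sketch of the same registration step with an explicit capability guard (names taken from Init() above; ResetViewPoint undoes the mapping):

	if (depthImageGenerator.IsCapabilitySupported(XN_CAPABILITY_ALTERNATIVE_VIEW_POINT))
	{
		depthImageGenerator.GetAlternativeViewPointCap().SetViewPoint(colorImageGenerator);
		// ... and later, to get the unregistered depth map back:
		depthImageGenerator.GetAlternativeViewPointCap().ResetViewPoint();
	}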
Пример #25
0
int main (int argc, char * argv[])
{

    try {
        // Initialize the context
        xn::Context context;
        XnStatus rc = context.InitFromXmlFile(CONFIG_XML_PATH);
        if (rc != XN_STATUS_OK) {
            throw std::runtime_error(xnGetStatusString(rc));
        }

        // Create the image generator
        xn::ImageGenerator image;
        rc = context.FindExistingNode(XN_NODE_TYPE_IMAGE, image);
        if (rc != XN_STATUS_OK) {
            throw std::runtime_error(xnGetStatusString(rc));
        }

        // Create the depth generator
        xn::DepthGenerator depth;
        rc = context.FindExistingNode(XN_NODE_TYPE_DEPTH, depth);
        if (rc != XN_STATUS_OK) {
            throw std::runtime_error(xnGetStatusString(rc));
        }

        // Align the depth coordinates with the image
        depth.GetAlternativeViewPointCap().SetViewPoint(image);

        // Create the user generator
        xn::UserGenerator user;
        rc = context.FindExistingNode( XN_NODE_TYPE_USER, user );
        if ( rc != XN_STATUS_OK ) {
            rc = user.Create(context);
            if ( rc != XN_STATUS_OK ) {
                throw std::runtime_error( xnGetStatusString( rc ) );
            }
        }

        // Make sure user detection (skeleton) is supported
        if (!user.IsCapabilitySupported(XN_CAPABILITY_SKELETON)) {
            throw std::runtime_error("user detection (skeleton) is not supported");
        }

        XnCallbackHandle userCallbacks, calibrationCallbacks, poseCallbacks;
        XnChar pose[20] = "";

        // Calibration may require a pose
        xn::SkeletonCapability skelton = user.GetSkeletonCap();
        if (skelton.NeedPoseForCalibration()) {
            // Make sure pose detection is supported
            if (!user.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION)) {
                throw std::runtime_error("pose detection is not supported");
            }

            // Get the calibration pose
            skelton.GetCalibrationPose(pose);

            // Register the pose detection callbacks
            // (renamed to poseCap so it doesn't shadow the pose name buffer above)
            xn::PoseDetectionCapability poseCap = user.GetPoseDetectionCap();
            poseCap.RegisterToPoseCallbacks(&::PoseDetected, &::PoseLost,
                &user, poseCallbacks);
        }

        // Register the user detection callbacks
        user.RegisterUserCallbacks(&::UserDetected, &::UserLost, pose, userCallbacks);

        // Register the calibration callbacks
        skelton.RegisterCalibrationCallbacks(&::CalibrationStart, &::CalibrationEnd,
            &user, calibrationCallbacks);

        // Track the full skeleton profile
        skelton.SetSkeletonProfile(XN_SKEL_PROFILE_ALL);

        // Start generating on all nodes
        context.StartGeneratingAll();

        // Create a camera-sized image (8-bit RGB)
        XnMapOutputMode outputMode;
        image.GetMapOutputMode(outputMode);
        cv::Ptr< IplImage > camera = ::cvCreateImage(cvSize(outputMode.nXRes, outputMode.nYRes), IPL_DEPTH_8U, 3);
        if (!camera) {
            throw std::runtime_error("error : cvCreateImage");
        }

        // Main loop
        while (1) {
            // Wait for all nodes to update
            context.WaitAndUpdateAll();

            // Get the image data
            xn::ImageMetaData imageMD;
            image.GetMetaData(imageMD);

            // Get the user (scene) data
            xn::SceneMetaData sceneMD;
            user.GetUserPixels(0, sceneMD);

            // Render the camera image
            char* dest = camera->imageData;
            const xn::RGB24Map& rgb = imageMD.RGB24Map();
            for (int y = 0; y < imageMD.YRes(); ++y) {
                for (int x = 0; x < imageMD.XRes(); ++x) {
                    // User label for this pixel (0 = background)
                    XnLabel label = sceneMD(x, y);

                    // Read the camera pixel, then overwrite it with white,
                    // so only the per-label color below remains visible
                    XnRGB24Pixel pixel = rgb(x, y);
                    pixel = xnRGB24Pixel( 255, 255, 255 );

                    // Write to the output buffer
                    dest[0] = pixel.nRed   * Colors[label][0];
                    dest[1] = pixel.nGreen * Colors[label][1];
                    dest[2] = pixel.nBlue  * Colors[label][2];
                    dest += 3;
                }
            }

            // Draw the skeletons
            XnUserID aUsers[15];
            XnUInt16 nUsers = 15;
            user.GetUsers(aUsers, nUsers);
            for (int i = 0; i < nUsers; ++i) {
                if (skelton.IsTracking(aUsers[i])) {
                    SkeltonDrawer skeltonDrawer(camera, skelton,
                        depth, aUsers[i]);
                    skeltonDrawer.draw();
                }
            }

            ::cvCvtColor(camera, camera, CV_BGR2RGB);
            ::cvShowImage("KinectImage", camera);

            // Key handling
            char key = cvWaitKey(10);
            // Quit on 'q'
            if (key == 'q') {
                break;
            }
        }
    }
    catch (std::exception& ex) {
        std::cout << ex.what() << std::endl;
    }

    return 0;
}
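
The label map from GetUserPixels also supports simple segmentation. A sketch that blacks out everything except one tracked user (reuses the names from the loop above; label 0 is the background):

            XnUserID target = aUsers[0];
            char* p = camera->imageData;
            for (int y = 0; y < imageMD.YRes(); ++y) {
                for (int x = 0; x < imageMD.XRes(); ++x, p += 3) {
                    if (sceneMD(x, y) != target) {
                        p[0] = p[1] = p[2] = 0;   // not the target user: black pixel
                    }
                }
            }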
Пример #26
0
bool OpenNIDevice::isDepthCropped () const throw (OpenNIException)
{
  lock_guard<mutex> depth_lock (depth_mutex_);
  XnCropping cropping;
  xn::DepthGenerator& depth_generator = const_cast<xn::DepthGenerator&>(depth_generator_);
  XnStatus status = depth_generator.GetCroppingCap ().GetCropping (cropping);
  if (status != XN_STATUS_OK)
    THROW_OPENNI_EXCEPTION ("could not read cropping information for depth stream. Reason: %s", xnGetStatusString (status));

  return cropping.bEnabled;
}
Пример #27
0
//----------------------------------------------
void myDepthGenerator::startGenerating() {
    XnStatus status = depth_generator.StartGenerating();
    if (status == XN_STATUS_OK) printf("DepthGenerator %i Generating Start!\n", xtionNum);
    else ofLogError("myDepthGenerator/startGenerating ",xnGetStatusString(status));
}
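
The matching stop method would follow the same shape (a sketch; StopGenerating is the standard counterpart on xn::Generator):

void myDepthGenerator::stopGenerating() {
    XnStatus status = depth_generator.StopGenerating();
    if (status == XN_STATUS_OK) printf("DepthGenerator %i Generating Stop!\n", xtionNum);
    else ofLogError("myDepthGenerator/stopGenerating ", xnGetStatusString(status));
}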
Пример #28
0
void OpenNIDevice::setDepthCropping (unsigned x, unsigned y, unsigned width, unsigned height) throw (OpenNIException)
{
  lock_guard<mutex> depth_lock (depth_mutex_);
  XnCropping cropping;
  cropping.nXOffset = x;
  cropping.nYOffset = y;
  cropping.nXSize   = width;
  cropping.nYSize   = height;

  cropping.bEnabled = (width != 0 && height != 0);
  XnStatus status = depth_generator_.GetCroppingCap ().SetCropping (cropping);
  if (status != XN_STATUS_OK)
    THROW_OPENNI_EXCEPTION ("could not set cropping information for depth stream. Reason: %s", xnGetStatusString (status));
}
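
Because bEnabled is derived from width and height above, passing zeros disables cropping. Usage sketch (the device instance name is assumed):

  device.setDepthCropping (0, 0, 0, 0);           // disable cropping: full frame
  device.setDepthCropping (160, 120, 320, 240);   // central 320x240 window of a VGA map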
Пример #29
0
CvCapture_OpenNI::CvCapture_OpenNI( int index )
{
    int deviceType = DEVICE_DEFAULT;
    XnStatus status;

    isContextOpened = false;
    maxBufferSize = DEFAULT_MAX_BUFFER_SIZE;
    isCircleBuffer = DEFAULT_IS_CIRCLE_BUFFER;
    maxTimeDuration = DEFAULT_MAX_TIME_DURATION;

    if( index >= 10 )
    {
        deviceType = index / 10;
        index %= 10;
    }

    if( deviceType > DEVICE_MAX )
        return;

    // Initialize and configure the context.
    status = context.Init();
    if( status != XN_STATUS_OK )
    {
        std::cerr << "CvCapture_OpenNI::CvCapture_OpenNI : Failed to initialize the context: "
                  << std::string(xnGetStatusString(status)) << std::endl;
        return;
    }

    // Find devices
    xn::NodeInfoList devicesList;
    status = context.EnumerateProductionTrees( XN_NODE_TYPE_DEVICE, NULL, devicesList, 0 );
    if( status != XN_STATUS_OK )
    {
        std::cerr << "CvCapture_OpenNI::CvCapture_OpenNI : Failed to enumerate production trees: "
                  << std::string(xnGetStatusString(status)) << std::endl;
        return;
    }

    // Choose the device according to index
    xn::NodeInfoList::Iterator it = devicesList.Begin();
    for( int i = 0; i < index && it != devicesList.End(); ++i ) it++;
    if( it == devicesList.End() )
    {
        std::cerr << "CvCapture_OpenNI::CvCapture_OpenNI : Failed to find device with index " << index << std::endl;
        return;
    }

    xn::NodeInfo deviceNode = *it;
    status = context.CreateProductionTree( deviceNode, productionNode );
    if( status != XN_STATUS_OK )
    {
        std::cerr << "CvCapture_OpenNI::CvCapture_OpenNI : Failed to create production tree: "
                  << std::string(xnGetStatusString(status)) << std::endl;
        return;
    }

    xn::ScriptNode scriptNode;
    status = context.RunXmlScript( XMLConfig.c_str(), scriptNode );
    if( status != XN_STATUS_OK )
    {
        std::cerr << "CvCapture_OpenNI::CvCapture_OpenNI : Failed to run xml script: "
                  << std::string(xnGetStatusString(status)) << std::endl;
        return;
    }

    // Associate generators with context.
    // enumerate the nodes to find if depth generator is present
    xn::NodeInfoList depthList;
    status = context.EnumerateExistingNodes( depthList, XN_NODE_TYPE_DEPTH );
    if( status != XN_STATUS_OK )
    {
        std::cerr << "CvCapture_OpenNI::CvCapture_OpenNI : Failed to enumerate depth generators: "
                  << std::string(xnGetStatusString(status)) << std::endl;
        return;
    }
    if( depthList.IsEmpty() )
    {
        std::cerr << "CvCapture_OpenNI::CvCapture_OpenNI : The device doesn't have a depth generator. Such devices aren't currently supported." << std::endl;
        return;
    }
    status = depthGenerator.Create( context );
    if( status != XN_STATUS_OK )
    {
        std::cerr << "CvCapture_OpenNI::CvCapture_OpenNI : Failed to create depth generator: "
                  << std::string(xnGetStatusString(status)) << std::endl;
        return;
    }

    // enumerate the nodes to find if image generator is present
    xn::NodeInfoList imageList;
    status = context.EnumerateExistingNodes( imageList, XN_NODE_TYPE_IMAGE );
    if( status != XN_STATUS_OK )
    {
        std::cerr << "CvCapture_OpenNI::CvCapture_OpenNI : Failed to enumerate image generators: "
                  << std::string(xnGetStatusString(status)) << std::endl;
        return;
    }

    if( !imageList.IsEmpty() )
    {
        status = imageGenerator.Create( context );
        if( status != XN_STATUS_OK )
        {
            std::cerr << "CvCapture_OpenNI::CvCapture_OpenNI : Failed to create image generator: "
                      <<  std::string(xnGetStatusString(status)) << std::endl;
            return;
        }
    }

    // Set map output mode (keep the calls outside the asserts so they still run in release builds).
    if( depthGenerator.IsValid() )
    {
        status = depthGenerator.SetMapOutputMode(defaultMapOutputMode()); // xn::DepthGenerator supports VGA only! (Jan 2011)
        CV_DbgAssert( status == XN_STATUS_OK );
    }
    if( imageGenerator.IsValid() )
    {
        status = imageGenerator.SetMapOutputMode(defaultMapOutputMode());
        CV_DbgAssert( status == XN_STATUS_OK );
    }

    if( deviceType == DEVICE_ASUS_XTION )
    {
        //ps/asus specific
        imageGenerator.SetIntProperty("InputFormat", 1 /*XN_IO_IMAGE_FORMAT_YUV422*/);
        imageGenerator.SetPixelFormat(XN_PIXEL_FORMAT_RGB24);
        depthGenerator.SetIntProperty("RegistrationType", 1 /*XN_PROCESSING_HARDWARE*/);
    }

    //  Start generating data.
    status = context.StartGeneratingAll();
    if( status != XN_STATUS_OK )
    {
        std::cerr << "CvCapture_OpenNI::CvCapture_OpenNI : Failed to start generating OpenNI data: "
                  << std::string(xnGetStatusString(status)) << std::endl;
        return;
    }

    if( !readCamerasParams() )
    {
        std::cerr << "CvCapture_OpenNI::CvCapture_OpenNI : Could not read cameras parameters" << std::endl;
        return;
    }

    outputMaps.resize( outputMapsTypesCount );

    isContextOpened = true;

    setProperty(CV_CAP_PROP_OPENNI_REGISTRATION, 1.0);
}
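
The constructor decodes index as deviceType*10 + deviceIndex, so through cv::VideoCapture the device type is normally selected with the CV_CAP_OPENNI* base constants, e.g. (assuming an OpenCV build with OpenNI support):

    cv::VideoCapture capture( CV_CAP_OPENNI_ASUS + 1 ); // ASUS Xtion, device index 1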
Пример #30
0
XnStatus XnServerSession::HandleSetGeneralProperty()
{
	XnStatus nRetVal = XN_STATUS_OK;

	// read it
	XnChar strModule[XN_DEVICE_MAX_STRING_LENGTH];
	XnChar strProp[XN_DEVICE_MAX_STRING_LENGTH];
	XnGeneralBuffer gbValue;
	nRetVal = m_privateIncomingPacker.ReadProperty(strModule, strProp, &gbValue);
	XN_IS_STATUS_OK(nRetVal);

	XnStatus nActionResult = SetGeneralPropertyImpl(strModule, strProp, gbValue);
	if (nActionResult != XN_STATUS_OK)
	{
		xnLogWarning(XN_MASK_SENSOR_SERVER, "Client %u failed to set property '%s.%s': %s", m_nID, strModule, strProp, xnGetStatusString(nActionResult));
	}

	nRetVal = SendReply(XN_SENSOR_SERVER_MESSAGE_GENERAL_OP_RESPOND, nActionResult);
	XN_IS_STATUS_OK(nRetVal);

	return (XN_STATUS_OK);
}
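
XN_IS_STATUS_OK, used throughout these handlers, is OpenNI's early-return guard; conceptually it expands to roughly:

#define XN_IS_STATUS_OK(nStatus)     \
	if (nStatus != XN_STATUS_OK)     \
	{                                \
		return (nStatus);            \
	}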