inline void getBGRImageFromMetaData( const xn::ImageMetaData& imageMetaData, cv::Mat& bgrImage )
{
    if( imageMetaData.PixelFormat() != XN_PIXEL_FORMAT_RGB24 )
        CV_Error( CV_StsUnsupportedFormat, "Unsupported format of grabbed image\n" );

    cv::Mat rgbImage( imageMetaData.YRes(), imageMetaData.XRes(), CV_8UC3 );
    const XnRGB24Pixel* pRgbImage = imageMetaData.RGB24Data();

    // CV_Assert( 3*sizeof(uchar) == sizeof(XnRGB24Pixel) );
    memcpy( rgbImage.data, pRgbImage, rgbImage.total()*sizeof(XnRGB24Pixel) );
    cv::cvtColor( rgbImage, bgrImage, CV_RGB2BGR );
}
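A minimal usage sketch (not from the original sources) showing how getBGRImageFromMetaData might be driven; the xn::ImageGenerator instance imageGenerator and the window name are assumed names for illustration.

// Hypothetical caller; assumes an already-initialized xn::ImageGenerator named imageGenerator.
xn::ImageMetaData imageMetaData;
imageGenerator.GetMetaData( imageMetaData );         // fetch the latest RGB24 frame's metadata
cv::Mat bgrImage;
getBGRImageFromMetaData( imageMetaData, bgrImage );  // RGB24 buffer -> BGR cv::Mat
cv::imshow( "bgr", bgrImage );                       // window name is arbitrary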
IplImage* CvCapture_OpenNI::retrieveGrayImage()
{
    if( !imageMetaData.Data() )
        return 0;

    CV_Assert( imageMetaData.BytesPerPixel() == 3 ); // RGB

    cv::Mat rgbImage;
    getBGRImageFromMetaData( imageMetaData, rgbImage );
    cv::cvtColor( rgbImage, outputMaps[CV_CAP_OPENNI_GRAY_IMAGE].mat, CV_BGR2GRAY );

    return outputMaps[CV_CAP_OPENNI_GRAY_IMAGE].getIplImagePtr();
}
virtual bool grab( xn::DepthMetaData& depthMetaData, xn::ImageMetaData& imageMetaData )
{
    while(1)
    {
        if( !isDepthFilled )
            isDepthFilled = popDepthMetaData(depth);
        if( !isImageFilled )
            isImageFilled = popImageMetaData(image);

        if( !isDepthFilled || !isImageFilled )
            break;

        // OpenNI timestamps are in microseconds; 1e-3 converts the difference to milliseconds.
        double timeDiff = 1e-3 * std::abs( static_cast<double>(depth.Timestamp()) -
                                           static_cast<double>(image.Timestamp()) );

        if( timeDiff <= approxSyncGrabber.maxTimeDuration )
        {
            depthMetaData.InitFrom(depth);
            imageMetaData.InitFrom(image);
            isDepthFilled = isImageFilled = false;
            return true;
        }
        else
        {
            // Drop the older of the two frames and try to pair again.
            if( depth.Timestamp() < image.Timestamp() )
                isDepthFilled = false;
            else
                isImageFilled = false;
        }
    }

    return false;
}
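For reference, the pairing test that grab() applies can be written as a standalone predicate. This is a sketch, assuming timestamps in microseconds and a tolerance in milliseconds; the names below are illustrative, not taken from the original code.

// Sketch of grab()'s pairing criterion: a depth/image pair is accepted when the
// two timestamps (microseconds) differ by at most maxTimeDurationMs milliseconds.
inline bool framesArePaired( XnUInt64 depthTimestampUs, XnUInt64 imageTimestampUs,
                             double maxTimeDurationMs )
{
    double diffMs = 1e-3 * std::abs( static_cast<double>(depthTimestampUs) -
                                     static_cast<double>(imageTimestampUs) );
    return diffMs <= maxTimeDurationMs;
}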
virtual inline bool popImageMetaData( xn::ImageMetaData& imageMetaData )
{
    cv::Ptr<xn::ImageMetaData> imagePtr;
    bool isPop = imageQueue.try_pop(imagePtr);
    if( isPop )
        imageMetaData.CopyFrom(*imagePtr);
    return isPop;
}
virtual inline bool popImageMetaData( xn::ImageMetaData& imageMetaData )
{
    if( imageQueue.empty() )
        return false;

    imageMetaData.CopyFrom(*imageQueue.front());
    imageQueue.pop();
    return true;
}
IplImage* CvCapture_OpenNI::retrieveBGRImage()
{
    if( !imageMetaData.Data() )
        return 0;

    getBGRImageFromMetaData( imageMetaData, outputMaps[CV_CAP_OPENNI_BGR_IMAGE].mat );

    return outputMaps[CV_CAP_OPENNI_BGR_IMAGE].getIplImagePtr();
}
/**
 * Return the RGB image serialized as a string.
 */
std::string getRGBImage(bool br)
{
    ReadLock r_lock(myLock);

    stringstream emitter;
    emitter << "{RGBMap," << g_imageMD.XRes() << "," << g_imageMD.YRes() << "}[";

    // Walk a local copy of the pixel pointer so the shared pPixelPoint is not advanced.
    // The buffer is row-major: YRes rows of XRes pixels each.
    const XnRGB24Pixel* pPixel = pPixelPoint;
    for(int y = 0; y < (int)g_imageMD.YRes(); y++)
    {
        if(y > 0 && br == true)
            emitter << "<BR>";
        for(int x = 0; x < (int)g_imageMD.XRes(); x++, pPixel++)
        {
            emitter << (int)(pPixel->nRed) << ",";
            emitter << (int)(pPixel->nGreen) << ",";
            emitter << (int)(pPixel->nBlue) << ";";
        }
    }
    emitter << "]";
    return emitter.str();
}
void SimKinect::DrawRGBMap(const xn::ImageMetaData& imd)
{
    static bool bInitialized = false;
    static int width;
    static int height;
    static int width_step;
    static XnRGB24Pixel* g_pTexMap;

    if(!bInitialized)
    {
        width  = video_size_width;
        height = video_size_height;
        g_pTexMap = (XnRGB24Pixel*)malloc(width * height * sizeof(XnRGB24Pixel));
        width_step = width * 3;
        bInitialized = true;
    }

    // Copy the image into the texture map, honoring the metadata's crop offsets.
    const XnRGB24Pixel* pImageRow = imd.RGB24Data();
    XnRGB24Pixel* pTexRow = g_pTexMap + imd.YOffset() * width;

    for (XnUInt y = 0; y < imd.YRes(); ++y)
    {
        const XnRGB24Pixel* pImage = pImageRow;
        XnRGB24Pixel* pTex = pTexRow + imd.XOffset();

        for (XnUInt x = 0; x < imd.XRes(); ++x, ++pImage, ++pTex)
        {
            *pTex = *pImage;
        }

        pImageRow += imd.XRes();
        pTexRow += width;
    }

    // Draw rgb image: convert RGB texture pixels to the BGR byte layout of color_frame.
    XnRGB24Pixel* pix_ptr = g_pTexMap;
    for ( int i = 0; i < height; i++ )
    {
        unsigned char* row_pointer = color_frame + i * width_step;
        for ( int j = 0; j < width_step; j += 3, pix_ptr++ )
        {
            row_pointer[j]   = pix_ptr->nBlue;
            row_pointer[j+1] = pix_ptr->nGreen;
            row_pointer[j+2] = pix_ptr->nRed;
        }
    }
    //imshow("COLOR", color_img);
}
bool DataCapture::captureOne()
{
    XnStatus rc = context_.WaitAndUpdateAll(); // want this to be WaitOneUpdateAll(RGB image)
    if( rc != XN_STATUS_OK )
    {
        std::cout << "WaitAndUpdateAll: " << xnGetStatusString(rc) << std::endl;
        return false;
    }

    // grab image (assumes a 640x480 RGB24 frame)
    imageGen_.GetMetaData(imageMd_);
    const XnRGB24Pixel* rgbData = imageMd_.RGB24Data();
    for( unsigned int i = 0; i < 640 * 480; ++i )
    {
        pRgbData_[3*i]     = rgbData->nRed;
        pRgbData_[3*i + 1] = rgbData->nGreen;
        pRgbData_[3*i + 2] = rgbData->nBlue;
        ++rgbData;
    }

    // grab depth image and scale the raw depth (0..2047) down to 8 bits
    depthGen_.GetMetaData(depthMd_);
    const uint16_t* pDepthDataU16 = depthMd_.Data();
    for( int i = 0; i < 640 * 480; ++i)
    {
        uint16_t d = pDepthDataU16[i];
        if( d != 0 )
        {
            pDepthData_[i] = (d * 255)/2048;
        }
        else
        {
            pDepthData_[i] = 0; // should be NAN
        }
    }
    return true;
}
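The manual depth loop above maps raw depth values (0..2047) to 8 bits with (d * 255) / 2048. The same scaling can be expressed with OpenCV, as a sketch; depth16/depth8 and the wrap of pDepthDataU16 are illustrative assumptions, and the result matches the loop up to rounding.

// OpenCV version of the same scaling (sketch): wrap the raw 16-bit depth buffer
// and convert it to 8-bit gray with alpha = 255/2048. Zero depth stays zero.
cv::Mat depth16( 480, 640, CV_16UC1, const_cast<uint16_t*>( pDepthDataU16 ) );
cv::Mat depth8;
depth16.convertTo( depth8, CV_8UC1, 255.0 / 2048.0 );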
int main ( int argc, char ** argv )
{
    //
    // Initializing Calibration Related
    //
    // ARTagHelper artagHelper ( colorImgWidth, colorImgHeight, ARTAG_CONFIG_FILE, ARTAG_POS_FILE );
    ARTagHelper artagHelper ( colorImgWidth, colorImgHeight, ARTAG_CONFIG_A3_FILE, ARTAG_POS_A3_FILE );
    ExtrCalibrator extrCalibrator ( 6, KINECT_INTR_FILE, KINECT_DIST_FILE );
    // unsigned char * kinectImgBuf = new unsigned char[colorImgWidth * colorImgHeight * 3];

    //
    // Initializing OpenNI Settings
    //
    int ctlWndKey = -1;
    XnStatus nRetVal = XN_STATUS_OK;
    xn::EnumerationErrors errors;

    //
    // Initialize Context Object
    //
    nRetVal = g_Context.InitFromXmlFile ( CONFIG_XML_PATH, g_ScriptNode, &errors );
    if ( nRetVal == XN_STATUS_NO_NODE_PRESENT )
    {
        XnChar strError[1024];
        errors.ToString ( strError, 1024 );
        printf ( "XN_STATUS_NO_NODE_PRESENT:\n%s\n", strError );
        system ( "pause" );
        return ( nRetVal );
    }
    else if ( nRetVal != XN_STATUS_OK )
    {
        printf ( "Open FAILED:\n%s\n", xnGetStatusString ( nRetVal ) );
        system ( "pause" );
        return ( nRetVal );
    }

    //
    // Handle the Depth Generator Node
    //
    nRetVal = g_Context.FindExistingNode ( XN_NODE_TYPE_DEPTH, g_DepthGen );
    if ( nRetVal != XN_STATUS_OK )
    {
        printf ( "No Depth Node Exists! Please Check your XML.\n" );
        return ( nRetVal );
    }

    //
    // Handle the Image Generator Node
    //
    nRetVal = g_Context.FindExistingNode ( XN_NODE_TYPE_IMAGE, g_ImageGen );
    if ( nRetVal != XN_STATUS_OK )
    {
        printf ( "No Image Node Exists! Please Check your XML.\n" );
        return ( nRetVal );
    }

    // g_DepthGen.GetAlternativeViewPointCap().SetViewPoint( g_ImageGen );

    g_DepthGen.GetMetaData ( g_DepthMD );
    g_ImageGen.GetMetaData ( g_ImageMD );
    assert ( g_ImageMD.PixelFormat() == XN_PIXEL_FORMAT_RGB24 );
    assert ( g_DepthMD.PixelFormat() == XN_PIXEL_FORMAT_GRAYSCALE_16_BIT );

    //
    // Create OpenCV Showing Window and Related Data Structures
    //
    cv::namedWindow ( IMAGE_WIN_NAME, CV_WINDOW_AUTOSIZE );
    cv::namedWindow ( DEPTH_WIN_NAME, CV_WINDOW_AUTOSIZE );
    cv::Mat depthImgMat  ( g_DepthMD.YRes(), g_DepthMD.XRes(), CV_16UC1 );
    cv::Mat depthImgShow ( g_DepthMD.YRes(), g_DepthMD.XRes(), CV_8UC3  );
    cv::Mat colorImgMat  ( g_ImageMD.YRes(), g_ImageMD.XRes(), CV_8UC3  );

#define ARTAG_DEBUG
#ifdef ARTAG_DEBUG
    cv::setMouseCallback ( IMAGE_WIN_NAME, ClickOnMouse, 0 );
#endif

    bool flipColor = true;

    //
    // Start to Loop
    //
    while ( ctlWndKey != ESC_KEY_VALUE )
    {
        //
        // Try to Get New Frame From Kinect
        //
        nRetVal = g_Context.WaitAnyUpdateAll ();
        g_DepthGen.GetMetaData ( g_DepthMD );
        g_ImageGen.GetMetaData ( g_ImageMD );

        assert ( g_DepthMD.FullXRes() == g_DepthMD.XRes() && g_DepthMD.FullYRes() == g_DepthMD.YRes() );
        assert ( g_ImageMD.FullXRes() == g_ImageMD.XRes() && g_ImageMD.FullYRes() == g_ImageMD.YRes() );

        GlobalUtility::CopyColorRawBufToCvMat8uc3 ( (const XnRGB24Pixel *)(g_ImageMD.Data()), colorImgMat );

#ifdef SHOW_DEPTH_WINDOW
        GlobalUtility::CopyDepthRawBufToCvMat16u ( (const XnDepthPixel *)(g_DepthMD.Data()), depthImgMat );
        // GlobalUtility::ConvertDepthCvMat16uToYellowCvMat ( depthImgMat, depthImgShow );
        GlobalUtility::ConvertDepthCvMat16uToGrayCvMat ( depthImgMat, depthImgShow );
        cv::imshow ( DEPTH_WIN_NAME, depthImgShow );
#endif

        ctlWndKey = cvWaitKey ( 15 );

        if ( ctlWndKey == 'f' || ctlWndKey == 'F' )
        {
            artagHelper.Clear();
            artagHelper.FindMarkerCorners ( (unsigned char *)(g_ImageMD.Data()) );
            artagHelper.PrintMarkerCornersPos2dInCam ();
            extrCalibrator.ExtrCalib ( artagHelper );

            std::cout << "\nKinect Extr Matrix:" << std::endl;
            extrCalibrator.PrintMatrix ( extrCalibrator.GetMatrix ( ExtrCalibrator::EXTR ) );
            std::cout << "Reprojection ERROR = " << extrCalibrator.ComputeReprojectionErr ( artagHelper ) << std::endl
                      // << extrCalibrator.ComputeReprojectionErr ( artagHelper.m_MarkerCornerPosCam2d, artagHelper.m_MarkerCornerPos3d, 24 ) << std::endl
                      << "Valid Marker Number = " << artagHelper.GetValidMarkerNumber() << std::endl
                      << std::endl;

            extrCalibrator.SaveMatrix ( ExtrCalibrator::EXTR, KINECT_EXTR_FILE );
        }

        if ( ctlWndKey == 's' || ctlWndKey == 'S' )
        {
            flipColor = !flipColor;
        }
        if ( flipColor )
        {
            cv::cvtColor ( colorImgMat, colorImgMat, CV_RGB2BGR );
        }

        artagHelper.DrawMarkersInCameraImage ( colorImgMat );
        cv::imshow ( IMAGE_WIN_NAME, colorImgMat );
    }

    g_Context.Release ();
    system ( "pause" );
    exit ( EXIT_SUCCESS );
}
// Save new data from OpenNI
void MovieMng::Update(const xn::DepthMetaData& dmd, const xn::ImageMetaData& imd)
{
    // Check that both ring buffers have free space.
    if((m_nBufferSizeFile == 0) || (m_nBufferSizeSend == 0))
    {
        printf("No free buffer space!! ForFile:%d ForSend:%d\n", m_nBufferSizeFile, m_nBufferSizeSend);
        return;
    }

    SingleFrame* pFrames = m_pFrames + m_nNextWrite;

    if(m_nNextWrite == 0)
    {
        m_nImageHeight = imd.FullYRes();
        m_nImageWidth  = imd.FullXRes();
        m_nDepthHeight = dmd.FullYRes();
        m_nDepthWidth  = dmd.FullXRes();
        printf("Image Height:%d Width:%d Depth Height:%d Width:%d\n",
               m_nImageHeight, m_nImageWidth, m_nDepthHeight, m_nDepthWidth);
    }

#ifdef LOG_WRITE_ENABLE
    fprintf(mmng_fp, "[%s] update. m_nNextWrite:%d\n", __FUNCTION__, m_nNextWrite);
#endif

    if (m_bDepth)
    {
        pFrames->depthFrame.CopyFrom(dmd);
    }
    if (m_bImage)
    {
        pFrames->imageFrame.CopyFrom(imd);
    }

    // Update the buffer bookkeeping.
    if (m_bFileWrite)
    {
        EnterCriticalSection(&csFileWriter);
        m_nDataCountFile++;
        m_nBufferSizeFile--;
    }
    if (m_bSendDepth)
    {
        EnterCriticalSection(&csDepthSender);
        m_nDataCountSend++;
        m_nBufferSizeSend--;
    }

    m_nNextWrite++;
    if (m_nBufferSize == m_nNextWrite)
    {
        m_nNextWrite = 0;
    }

    if (m_bFileWrite)
    {
        LeaveCriticalSection(&csFileWriter);
    }
    if (m_bSendDepth)
    {
        LeaveCriticalSection(&csDepthSender);
    }
}
void start_kinect()
{
    XnStatus nRetVal = XN_STATUS_OK;
    xn::EnumerationErrors errors;
    UsersCount = 0;

    const char *fn = NULL;
    if (fileExists(SAMPLE_XML_PATH))
        fn = SAMPLE_XML_PATH;
    else if (fileExists(SAMPLE_XML_PATH_LOCAL))
        fn = SAMPLE_XML_PATH_LOCAL;
    else
    {
        printf("Could not find '%s' nor '%s'. Aborting.\n", SAMPLE_XML_PATH, SAMPLE_XML_PATH_LOCAL);
        //return XN_STATUS_ERROR;
    }
    printf("Reading config from: '%s'\n", fn);

    nRetVal = g_Context.InitFromXmlFile(fn, g_scriptNode, &errors);
    if (nRetVal == XN_STATUS_NO_NODE_PRESENT)
    {
        XnChar strError[1024];
        errors.ToString(strError, 1024);
        printf("%s\n", strError);
        //return (nRetVal);
    }
    else if (nRetVal != XN_STATUS_OK)
    {
        printf("Open failed: %s\n", xnGetStatusString(nRetVal));
        //return (nRetVal);
    }

    nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_depth);
    CHECK_RC(nRetVal, "No depth");
    nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_IMAGE, g_image);
    CHECK_RC(nRetVal, "No image");

    nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_USER, g_UserGenerator);
    if (nRetVal != XN_STATUS_OK)
    {
        nRetVal = g_UserGenerator.Create(g_Context);
        CHECK_RC(nRetVal, "Find user generator");
    }

    XnCallbackHandle hUserCallbacks, hCalibrationStart, hCalibrationComplete, hPoseDetected;
    if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_SKELETON))
    {
        printf("Supplied user generator doesn't support skeleton\n");
        //return 1;
    }
    nRetVal = g_UserGenerator.RegisterUserCallbacks(User_NewUser, User_LostUser, NULL, hUserCallbacks);
    CHECK_RC(nRetVal, "Register to user callbacks");
    nRetVal = g_UserGenerator.GetSkeletonCap().RegisterToCalibrationStart(UserCalibration_CalibrationStart, NULL, hCalibrationStart);
    CHECK_RC(nRetVal, "Register to calibration start");
    nRetVal = g_UserGenerator.GetSkeletonCap().RegisterToCalibrationComplete(UserCalibration_CalibrationComplete, NULL, hCalibrationComplete);
    CHECK_RC(nRetVal, "Register to calibration complete");

    if (g_UserGenerator.GetSkeletonCap().NeedPoseForCalibration())
    {
        g_bNeedPose = TRUE;
        if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_POSE_DETECTION))
        {
            printf("Pose required, but not supported\n");
            //return 1;
        }
        nRetVal = g_UserGenerator.GetPoseDetectionCap().RegisterToPoseDetected(UserPose_PoseDetected, NULL, hPoseDetected);
        CHECK_RC(nRetVal, "Register to Pose Detected");
        g_UserGenerator.GetSkeletonCap().GetCalibrationPose(g_strPose);
    }

    g_UserGenerator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_ALL);

    nRetVal = g_Context.StartGeneratingAll();
    CHECK_RC(nRetVal, "StartGenerating");

    XnUserID aUsers[MAX_NUM_USERS];
    XnUInt16 nUsers;
    XnSkeletonJointTransformation anyjoint;

    printf("Starting to run\n");
    if(g_bNeedPose)
    {
        printf("Assume calibration pose\n");
    }

    XnUInt32 epochTime = 0;
    while (!xnOSWasKeyboardHit())
    {
        g_Context.WaitOneUpdateAll(g_UserGenerator);

        // print the torso information for the first user already tracking
        nUsers = MAX_NUM_USERS;
        g_UserGenerator.GetUsers(aUsers, nUsers);
        int numTracked = 0;
        int userToPrint = -1;

        WriteLock w_lock(myLock);
        pDepthMap = g_depth.GetDepthMap();
        pPixelMap = g_image.GetRGB24ImageMap();
        g_depth.GetMetaData(g_depthMD);
        g_image.GetMetaData(g_imageMD);
        pPixelPoint = g_imageMD.RGB24Data();

        for(XnUInt16 i = 0; i < nUsers; i++)
        {
            if(g_UserGenerator.GetSkeletonCap().IsTracking(aUsers[i]) == FALSE)
                continue;

            {
                /* Writing all new movements into structure */
                /* Head */
                g_UserGenerator.GetSkeletonCap().GetSkeletonJoint(aUsers[i], XN_SKEL_HEAD, anyjoint);
                Skeletons[i]["Head"]["X"] = anyjoint.position.position.X;
                Skeletons[i]["Head"]["Y"] = anyjoint.position.position.Y;
                Skeletons[i]["Head"]["Z"] = anyjoint.position.position.Z;
                /* Neck */
                g_UserGenerator.GetSkeletonCap().GetSkeletonJoint(aUsers[i], XN_SKEL_NECK, anyjoint);
                Skeletons[i]["Neck"]["X"] = anyjoint.position.position.X;
                Skeletons[i]["Neck"]["Y"] = anyjoint.position.position.Y;
                Skeletons[i]["Neck"]["Z"] = anyjoint.position.position.Z;
                /* Left Shoulder */
                g_UserGenerator.GetSkeletonCap().GetSkeletonJoint(aUsers[i], XN_SKEL_LEFT_SHOULDER, anyjoint);
                Skeletons[i]["LeftShoulder"]["X"] = anyjoint.position.position.X;
                Skeletons[i]["LeftShoulder"]["Y"] = anyjoint.position.position.Y;
                Skeletons[i]["LeftShoulder"]["Z"] = anyjoint.position.position.Z;
                /* Right Shoulder */
                g_UserGenerator.GetSkeletonCap().GetSkeletonJoint(aUsers[i], XN_SKEL_RIGHT_SHOULDER, anyjoint);
                Skeletons[i]["RightShoulder"]["X"] = anyjoint.position.position.X;
                Skeletons[i]["RightShoulder"]["Y"] = anyjoint.position.position.Y;
                Skeletons[i]["RightShoulder"]["Z"] = anyjoint.position.position.Z;
                /* Torso */
                g_UserGenerator.GetSkeletonCap().GetSkeletonJoint(aUsers[i], XN_SKEL_TORSO, anyjoint);
                Skeletons[i]["Torso"]["X"] = anyjoint.position.position.X;
                Skeletons[i]["Torso"]["Y"] = anyjoint.position.position.Y;
                Skeletons[i]["Torso"]["Z"] = anyjoint.position.position.Z;
                /* Left Elbow */
                g_UserGenerator.GetSkeletonCap().GetSkeletonJoint(aUsers[i], XN_SKEL_LEFT_ELBOW, anyjoint);
                Skeletons[i]["LeftElbow"]["X"] = anyjoint.position.position.X;
                Skeletons[i]["LeftElbow"]["Y"] = anyjoint.position.position.Y;
                Skeletons[i]["LeftElbow"]["Z"] = anyjoint.position.position.Z;
                /* Right Elbow */
                g_UserGenerator.GetSkeletonCap().GetSkeletonJoint(aUsers[i], XN_SKEL_RIGHT_ELBOW, anyjoint);
                Skeletons[i]["RightElbow"]["X"] = anyjoint.position.position.X;
                Skeletons[i]["RightElbow"]["Y"] = anyjoint.position.position.Y;
                Skeletons[i]["RightElbow"]["Z"] = anyjoint.position.position.Z;
                /* Left Hip */
                g_UserGenerator.GetSkeletonCap().GetSkeletonJoint(aUsers[i], XN_SKEL_LEFT_HIP, anyjoint);
                Skeletons[i]["LeftHip"]["X"] = anyjoint.position.position.X;
                Skeletons[i]["LeftHip"]["Y"] = anyjoint.position.position.Y;
                Skeletons[i]["LeftHip"]["Z"] = anyjoint.position.position.Z;
                /* Right Hip */
                g_UserGenerator.GetSkeletonCap().GetSkeletonJoint(aUsers[i], XN_SKEL_RIGHT_HIP, anyjoint);
                Skeletons[i]["RightHip"]["X"] = anyjoint.position.position.X;
                Skeletons[i]["RightHip"]["Y"] = anyjoint.position.position.Y;
                Skeletons[i]["RightHip"]["Z"] = anyjoint.position.position.Z;
                /* Left Hand */
                g_UserGenerator.GetSkeletonCap().GetSkeletonJoint(aUsers[i], XN_SKEL_LEFT_HAND, anyjoint);
                Skeletons[i]["LeftHand"]["X"] = anyjoint.position.position.X;
                Skeletons[i]["LeftHand"]["Y"] = anyjoint.position.position.Y;
                Skeletons[i]["LeftHand"]["Z"] = anyjoint.position.position.Z;
                /* Right Hand */
                g_UserGenerator.GetSkeletonCap().GetSkeletonJoint(aUsers[i], XN_SKEL_RIGHT_HAND, anyjoint);
                Skeletons[i]["RightHand"]["X"] = anyjoint.position.position.X;
                Skeletons[i]["RightHand"]["Y"] = anyjoint.position.position.Y;
                Skeletons[i]["RightHand"]["Z"] = anyjoint.position.position.Z;
                /* Left Knee */
                g_UserGenerator.GetSkeletonCap().GetSkeletonJoint(aUsers[i], XN_SKEL_LEFT_KNEE, anyjoint);
                Skeletons[i]["LeftKnee"]["X"] = anyjoint.position.position.X;
                Skeletons[i]["LeftKnee"]["Y"] = anyjoint.position.position.Y;
                Skeletons[i]["LeftKnee"]["Z"] = anyjoint.position.position.Z;
                /* Right Knee */
                g_UserGenerator.GetSkeletonCap().GetSkeletonJoint(aUsers[i], XN_SKEL_RIGHT_KNEE, anyjoint);
                Skeletons[i]["RightKnee"]["X"] = anyjoint.position.position.X;
                Skeletons[i]["RightKnee"]["Y"] = anyjoint.position.position.Y;
                Skeletons[i]["RightKnee"]["Z"] = anyjoint.position.position.Z;
                /* Left Foot */
                g_UserGenerator.GetSkeletonCap().GetSkeletonJoint(aUsers[i], XN_SKEL_LEFT_FOOT, anyjoint);
                Skeletons[i]["LeftFoot"]["X"] = anyjoint.position.position.X;
                Skeletons[i]["LeftFoot"]["Y"] = anyjoint.position.position.Y;
                Skeletons[i]["LeftFoot"]["Z"] = anyjoint.position.position.Z;
                /* Right Foot */
                g_UserGenerator.GetSkeletonCap().GetSkeletonJoint(aUsers[i], XN_SKEL_RIGHT_FOOT, anyjoint);
                Skeletons[i]["RightFoot"]["X"] = anyjoint.position.position.X;
                Skeletons[i]["RightFoot"]["Y"] = anyjoint.position.position.Y;
                Skeletons[i]["RightFoot"]["Z"] = anyjoint.position.position.Z;

                /*printf("user %d: head at (%6.2f,%6.2f,%6.2f)\n", aUsers[i],
                       Skeletons[i]["Head"]["X"], Skeletons[i]["Head"]["Y"], Skeletons[i]["Head"]["Z"]);*/
            }
        }
    }

    g_scriptNode.Release();
    g_depth.Release();
    g_image.Release();
    g_UserGenerator.Release();
    g_Context.Release();
}
int main ( int argc, char * argv[] )
{
    //
    // Initialize OpenNI Settings
    //
    XnStatus nRetVal = XN_STATUS_OK;
    xn::ScriptNode scriptNode;
    xn::EnumerationErrors errors;

    //
    // Initialize Context Object
    //
    nRetVal = g_Context.InitFromXmlFile ( CONFIG_XML_PATH, scriptNode, &errors );
    if ( nRetVal == XN_STATUS_NO_NODE_PRESENT )
    {
        XnChar strError[1024];
        errors.ToString(strError, 1024);
        printf ( "XN_STATUS_NO_NODE_PRESENT:\n%s\n", strError );
        system ( "pause" );
        return ( nRetVal );
    }
    else if ( nRetVal != XN_STATUS_OK )
    {
        printf ( "Open failed: %s\n", xnGetStatusString(nRetVal) );
        system ( "pause" );
        return ( nRetVal );
    }

    //
    // Handle Image & Depth Generator Node
    //
    bool colorFlag = true;
    bool depthFlag = true;

    nRetVal = g_Context.FindExistingNode ( XN_NODE_TYPE_DEPTH, g_DepthGen );
    if ( nRetVal != XN_STATUS_OK )
    {
        printf("No depth node exists!\n");
        depthFlag = false;
    }
    nRetVal = g_Context.FindExistingNode ( XN_NODE_TYPE_IMAGE, g_ImageGen );
    if ( nRetVal != XN_STATUS_OK )
    {
        printf("No image node exists!\n");
        colorFlag = false;
    }

    // g_DepthGen.GetAlternativeViewPointCap().SetViewPoint( g_ImageGen );

    if ( depthFlag )
    {
        g_DepthGen.GetMetaData ( g_DepthMD );
        assert ( g_DepthMD.PixelFormat() == XN_PIXEL_FORMAT_GRAYSCALE_16_BIT );
    }
    if ( colorFlag )
    {
        g_ImageGen.GetMetaData ( g_ImageMD );
        assert ( g_ImageMD.PixelFormat() == XN_PIXEL_FORMAT_RGB24 );
    }

    g_DepthImgShow = cv::Mat ( g_DepthMD.YRes(), g_DepthMD.XRes(), CV_8UC1  );
    g_DepthImgMat  = cv::Mat ( g_DepthMD.YRes(), g_DepthMD.XRes(), CV_16UC1 );
    g_ColorImgMat  = cv::Mat ( g_ImageMD.YRes(), g_ImageMD.XRes(), CV_8UC3  );

    //
    // Start to Loop
    //
    bool flipColor = true;
    int ctlWndKey = -1;
    g_StartTickCount = GetTickCount();
    g_HeadTrackingFrameCount = 0;

    while ( ctlWndKey != ESC_KEY_VALUE )
    {
        nRetVal = g_Context.WaitOneUpdateAll ( g_DepthGen );
        // nRetVal = g_Context.WaitAnyUpdateAll();

#ifdef HANDLING_IMAGE_DATA
        if ( colorFlag )
        {
            g_ImageGen.GetMetaData ( g_ImageMD );

            assert ( g_ImageMD.FullXRes() == g_ImageMD.XRes() );
            assert ( g_ImageMD.FullYRes() == g_ImageMD.YRes() );

            GlobalUtility::CopyColorRawBufToCvMat8uc3 ( (const XnRGB24Pixel *)(g_ImageMD.Data()), g_ColorImgMat );

            if ( ctlWndKey == 's' || ctlWndKey == 'S' )     // Switch
            {
                flipColor = !flipColor;
            }
            if ( flipColor )
            {
                cv::cvtColor ( g_ColorImgMat, g_ColorImgMat, CV_RGB2BGR );
            }

            cv::namedWindow ( IMAGE_WIN_NAME, CV_WINDOW_AUTOSIZE );
            cv::imshow ( IMAGE_WIN_NAME, g_ColorImgMat );
        }
#endif

#ifdef HANDLING_DEPTH_DATA
        if ( depthFlag )
        {
            g_DepthGen.GetMetaData(g_DepthMD);

            // assert ( g_DepthMD.FullXRes() == g_DepthMD.XRes() );
            // assert ( g_DepthMD.FullYRes() == g_DepthMD.YRes() );

            GlobalUtility::CopyDepthRawBufToCvMat16u ( (const XnDepthPixel *)(g_DepthMD.Data()), g_DepthImgMat );
            GlobalUtility::ConvertDepthCvMat16uToGrayCvMat ( g_DepthImgMat, g_DepthImgShow );

            /*
            cv::putText(colorImgMat,
                        GlobalUtility::DoubleToString(g_ImageMD.FPS()) + " FPS",
                        cv::Point(10, 450), cv::FONT_ITALIC, 0.7,
                        cv::Scalar(255, 255, 255, 0), 2, 8, false);
            */

            cv::namedWindow ( DEPTH_WIN_NAME, CV_WINDOW_AUTOSIZE );
            cv::imshow ( DEPTH_WIN_NAME, g_DepthImgShow );
        }
#endif

        XnFieldOfView fov;
        g_DepthGen.GetFieldOfView( fov );
        std::cout << "HFov = " << fov.fHFOV << std::endl
                  << "VFov = " << fov.fVFOV << std::endl;

        ctlWndKey = cvWaitKey ( 5 );

        g_HeadTrackingFrameCount++;
        g_CurrTickCount = GetTickCount();
        std::cout << "FPS = "
                  << 1000 / ( ( double )( g_CurrTickCount - g_StartTickCount ) / ( double )( g_HeadTrackingFrameCount ) )
                  << std::endl;
    }

    g_Context.Release ();
    exit ( EXIT_SUCCESS );
}
bool OpenNIVideo::update(osg::NodeVisitor* nv)
{
    // This is the main function of your video plugin.
    // You can either retrieve images from your video stream/camera/file
    // or communicate with a thread to synchronize and get the data out.
    // The most important part is to synchronize your data
    // and copy the result to the VideoImageStream used in this plugin.

    // 0. you can collect some stats, for that you can use a timer
    osg::Timer t;

    {
        // 1. mutex lock access to the image video stream
        OpenThreads::ScopedLock<OpenThreads::Mutex> _lock(this->getMutex());

        osg::notify(osg::DEBUG_INFO) << "osgART::OpenNIVideo::update() get new image.." << std::endl;

        XnStatus nRetVal = XN_STATUS_OK;
        nRetVal = context.WaitAndUpdateAll();
        CHECK_RC(nRetVal, "Update Data");

        xnFPSMarkFrame(&xnFPS);

        depth_generator.GetMetaData(depthMD);
        const XnDepthPixel* pDepthMap = depthMD.Data(); // 16-bit depth map (one XnDepthPixel per pixel)

        image_generator.GetMetaData(imageMD);
        const XnUInt8* pImageMap = imageMD.Data();

        // Hybrid mode isn't supported in this sample
        if (imageMD.FullXRes() != depthMD.FullXRes() || imageMD.FullYRes() != depthMD.FullYRes())
        {
            std::cerr << "The device depth and image resolution must be equal!" << std::endl;
            exit(1);
        }

        // RGB is the only image format supported.
        if (imageMD.PixelFormat() != XN_PIXEL_FORMAT_RGB24)
        {
            std::cerr << "The device image format must be RGB24" << std::endl;
            exit(1);
        }

        const XnDepthPixel* pDepth = pDepthMap;
        const XnUInt8* pImage = pImageMap;

        XnDepthPixel zMax = depthMD.ZRes();
        // Convert the 16-bit depth buffer to 8-bit gray, scaled by the maximum depth value.
        for ( unsigned int i = 0; i < (depthMD.XRes() * depthMD.YRes()); ++i )
        {
            *(_depthBufferByte + i) = 255 * (float(*(pDepth + i)) / float(zMax));
        }

        memcpy(_videoStreamList[0]->data(), pImage, _videoStreamList[0]->getImageSizeInBytes());
        memcpy(_videoStreamList[1]->data(), _depthBufferByte, _videoStreamList[1]->getImageSizeInBytes());

        // 3. don't forget to call this to notify the rest of the application
        //    that you have a new video image
        _videoStreamList[0]->dirty();
        _videoStreamList[1]->dirty();
    }

    // 4. hopefully report some interesting data
    if (nv)
    {
        const osg::FrameStamp *framestamp = nv->getFrameStamp();
        if (framestamp && _stats.valid())
        {
            _stats->setAttribute(framestamp->getFrameNumber(), "Capture time taken", t.time_m());
        }
    }

    // Increase modified count every X ms to ensure tracker updates
    if (updateTimer.time_m() > 50)
    {
        _videoStreamList[0]->dirty();
        _videoStreamList[1]->dirty();
        updateTimer.setStartTick();
    }

    return true;
}
void DrawDepthMap(const xn::DepthMetaData& dmd, const xn::SceneMetaData& smd, XnUserID player, xn::ImageMetaData& imd)
{
    texWidth = 640;
    texHeight = 480;
    LEFT = 0;
    RIGHT = 640;
    TOP = 0;
    BOTTOM = 480;

    nValue = 0;
    nIndex = 0;
    nX = 0;
    nY = 0;
    nNumberOfPoints = 0;

    g_nXRes = dmd.XRes();
    g_nYRes = dmd.YRes();

    pDestImage = pDepthTexBuf;
    pDepth = dmd.Data();
    pixel = imd.RGB24Data();
    pLabels = smd.Data();

    // Calculate the accumulative histogram
    memset(g_pDepthHist, 0, MAX_DEPTH*sizeof(float));
    for (nY = 0; nY < g_nYRes; nY++)
    {
        for (nX = 0; nX < g_nXRes; nX++)
        {
            nValue = *pDepth;
            if (nValue != 0)
            {
                g_pDepthHist[nValue]++;
                nNumberOfPoints++;
            }
            pDepth++;
        }
    }
    for (nIndex = 1; nIndex < MAX_DEPTH; nIndex++)
    {
        g_pDepthHist[nIndex] += g_pDepthHist[nIndex-1];
    }
    if (nNumberOfPoints)
    {
        for (nIndex = 1; nIndex < MAX_DEPTH; nIndex++)
        {
            g_pDepthHist[nIndex] = (unsigned int)(256 * (1.0f - (g_pDepthHist[nIndex] / nNumberOfPoints)));
        }
    }
    // printf("Debug: %i\n", focus);

    pDepth = (short unsigned int*)dmd.Data();

    // Prepare the texture map
    for (nY = 0; nY < g_nYRes; nY++)
    {
        for (nX = 0; nX < g_nXRes; nX++)
        {
            nValue = *pDepth;

            if (nX == (int)centerScreen[0] && nY == (int)centerScreen[1])
            {
                if (calibrationMode)
                {
                    depthVal = nValue;
                    // printf("depthVal: %i\n", depthVal);
                }
            }
            //printf("Depth: %i \n", nValue);

            label = *pLabels;
            // XnUInt32 nColorID = label % nColors;

            if (label != focus)
            {
                if (calibrationMode)
                {
                    pDestImage[0] = pixel->nRed;
                    pDestImage[1] = pixel->nGreen;
                    pDestImage[2] = pixel->nBlue;
                    pDestImage[3] = 255;
                }
                else
                {
                    pDestImage[0] = 0;
                    pDestImage[1] = 0;
                    pDestImage[2] = 0;
                    pDestImage[3] = 0;
                }
            }
            else
            {
                pDestImage[0] = pixel->nRed;
                pDestImage[1] = pixel->nGreen;
                pDestImage[2] = pixel->nBlue;
                pDestImage[3] = 255;

                // find max/min values for width and height boundaries
                if (nX > (unsigned int)LEFT)   { LEFT = nX; }
                if (nX < (unsigned int)RIGHT)  { RIGHT = nX; }
                if (nY > (unsigned int)TOP)    { TOP = nY; }
                if (nY < (unsigned int)BOTTOM) { BOTTOM = nY; }
            }

            pixel++;
            pDepth++;
            pLabels++;
            pDestImage += 4;
        }
        pDestImage += (texWidth - g_nXRes) * 4;
    }

    // Display the OpenGL texture map
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, texWidth, texHeight, 0, GL_RGBA, GL_UNSIGNED_BYTE, pDepthTexBuf);

    nUsers = 15;
    g_UserGenerator.GetUsers(aUsers, nUsers);
    g_UserGenerator.GetCoM(aUsers[0], com);
    CENTER = com.X;

    //glDisable(GL_BLEND);
    //glDisable(GL_TEXTURE_2D);

    /*
    char strLabel[20] = "";
    XnUserID aUsers[15];
    XnUInt16 nUsers = 15;
    g_UserGenerator.GetUsers(aUsers, nUsers);
    for (int i = 0; i < nUsers; ++i)
    {
        XnPoint3D com;
        g_UserGenerator.GetCoM(aUsers[i], com);
        g_DepthGenerator.ConvertRealWorldToProjective(1, &com, &com);

        if (aUsers[i] == player)
            sprintf(strLabel, "%d (Player)", aUsers[i]);
        else
            sprintf(strLabel, "%d", aUsers[i]);

        //glColor4f(1-Colors[i%nColors][0], 1-Colors[i%nColors][1], 1-Colors[i%nColors][2], 1);
        glRasterPos2i(com.X, com.Y);
        glPrintString(GLUT_BITMAP_HELVETICA_18, strLabel);
    */
    // }

    // Draw skeleton of user
    // glBegin(GL_LINES);
    // //glColor4f(1,0,0,1.0f);
    // glVertex3f(-1, 1, 0);
    // glVertex3f(1, -1, 0);
    // //glColor4f(1-Colors[player%nColors][0], 1-Colors[player%nColors][1], 1-Colors[player%nColors][2], 1);
    // DrawLimb(1, XN_SKEL_HEAD, XN_SKEL_NECK);
    //
    // DrawLimb(1, XN_SKEL_NECK, XN_SKEL_LEFT_SHOULDER);
    // DrawLimb(1, XN_SKEL_LEFT_SHOULDER, XN_SKEL_LEFT_ELBOW);
    // DrawLimb(1, XN_SKEL_LEFT_ELBOW, XN_SKEL_LEFT_HAND);
    //
    // DrawLimb(1, XN_SKEL_NECK, XN_SKEL_RIGHT_SHOULDER);
    // DrawLimb(1, XN_SKEL_RIGHT_SHOULDER, XN_SKEL_RIGHT_ELBOW);
    // DrawLimb(1, XN_SKEL_RIGHT_ELBOW, XN_SKEL_RIGHT_HAND);
    //
    // DrawLimb(1, XN_SKEL_LEFT_SHOULDER, XN_SKEL_TORSO);
    // DrawLimb(1, XN_SKEL_RIGHT_SHOULDER, XN_SKEL_TORSO);
    //
    // DrawLimb(1, XN_SKEL_TORSO, XN_SKEL_LEFT_HIP);
    // DrawLimb(1, XN_SKEL_LEFT_HIP, XN_SKEL_LEFT_KNEE);
    // DrawLimb(1, XN_SKEL_LEFT_KNEE, XN_SKEL_LEFT_FOOT);
    //
    // DrawLimb(1, XN_SKEL_TORSO, XN_SKEL_RIGHT_HIP);
    // DrawLimb(1, XN_SKEL_RIGHT_HIP, XN_SKEL_RIGHT_KNEE);
    // DrawLimb(1, XN_SKEL_RIGHT_KNEE, XN_SKEL_RIGHT_FOOT);
    // glEnd();

    //XnSkeletonJoint eJoint1 = XN_SKEL_RIGHT_HAND;
}