/// <summary> /// Draw a bone between 2 tracked joint. /// <summary> /// <param name="skeletonData">Skeleton coordinates</param> /// <param name="imageRect">The rect which the color or depth image is streched to fit</param> /// <param name="joint0">Index for the first joint</param> /// <param name="joint1">Index for the second joint</param> void UKinect::drawBone(const NUI_SKELETON_DATA& skeletonData, NUI_SKELETON_POSITION_INDEX joint0, NUI_SKELETON_POSITION_INDEX joint1) { NUI_SKELETON_POSITION_TRACKING_STATE state0 = skeletonData.eSkeletonPositionTrackingState[joint0]; NUI_SKELETON_POSITION_TRACKING_STATE state1 = skeletonData.eSkeletonPositionTrackingState[joint1]; // Any is not tracked if (NUI_SKELETON_POSITION_NOT_TRACKED == state0 || NUI_SKELETON_POSITION_NOT_TRACKED == state1) { return; } // Both are inferred if (NUI_SKELETON_POSITION_INFERRED == state0 && NUI_SKELETON_POSITION_INFERRED == state1) { return; } LONG x1, y1, x2, y2; LONG x1d, y1d, x2d, y2d; USHORT depth; NuiTransformSkeletonToDepthImage(skeletonData.SkeletonPositions[joint0], &x1d, &y1d, &depth, (NUI_IMAGE_RESOLUTION)depthResolution.as<int>()); NuiImageGetColorPixelCoordinatesFromDepthPixelAtResolution((NUI_IMAGE_RESOLUTION)colorResolution.as<int>(), (NUI_IMAGE_RESOLUTION)depthResolution.as<int>(), NULL, x1d, y1d, depth, &x1, &y1); NuiTransformSkeletonToDepthImage(skeletonData.SkeletonPositions[joint1], &x2d, &y2d, &depth, (NUI_IMAGE_RESOLUTION)depthResolution.as<int>()); NuiImageGetColorPixelCoordinatesFromDepthPixelAtResolution((NUI_IMAGE_RESOLUTION)colorResolution.as<int>(), (NUI_IMAGE_RESOLUTION)depthResolution.as<int>(), NULL, x2d, y2d, depth, &x2, &y2); // We assume all drawn bones are inferred unless BOTH joints are tracked if (NUI_SKELETON_POSITION_TRACKED == state0 && NUI_SKELETON_POSITION_TRACKED == state1) { line(skeletonCVMat, Point(x1, y1), Point(x2, y2), CV_RGB(0, 0, 255), 4); } else { line(skeletonCVMat, Point(x1, y1), Point(x2, y2), CV_RGB(160, 160, 180), 4); } }
/// <summary>
/// Snapshot the right-hand, left-hand and head joints of a skeleton into an
/// ArmPosition record (projected into depth-image coordinates) and timestamp it.
/// </summary>
/// <param name="pSkel">Skeleton whose joint positions are read</param>
/// <param name="pArmPosition">Output record receiving x/y coordinates and depth per joint</param>
void SkeletalTracker::LoadPosition( NUI_SKELETON_DATA * pSkel, ArmPosition * pArmPosition)
{
    int i = NUI_SKELETON_POSITION_HAND_RIGHT;
    // NOTE(review): the 4-argument overload writes a packed USHORT depth into
    // the third out-parameter; casting &m_z to USHORT* assumes m_z is a 16-bit
    // field (or that only its low 16 bits matter). If m_z is wider, the upper
    // bytes are left stale -- confirm the ArmPosition member types.
    NuiTransformSkeletonToDepthImage( pSkel->SkeletonPositions[i], &(pArmPosition->m_rightHand.m_x), &(pArmPosition->m_rightHand.m_y), (USHORT*) &( pArmPosition->m_rightHand.m_z) );
    i = NUI_SKELETON_POSITION_HAND_LEFT;
    NuiTransformSkeletonToDepthImage( pSkel->SkeletonPositions[i], &(pArmPosition->m_leftHand.m_x), &(pArmPosition->m_leftHand.m_y), (USHORT*) &(pArmPosition->m_leftHand.m_z) );
    i = NUI_SKELETON_POSITION_HEAD;
    // The head joint is stored in the "m_body" slot.
    NuiTransformSkeletonToDepthImage( pSkel->SkeletonPositions[i], &(pArmPosition->m_body.m_x), &(pArmPosition->m_body.m_y), (USHORT*) &(pArmPosition->m_body.m_z) );
    // Record when this sample was taken (milliseconds).
    pArmPosition->m_startTime = getMilliCount();
}
/// <summary> /// Find closest skeleton to set tracked /// </summary> /// <param name="trackIDs">Array of skeleton tracking IDs</param> void UKinect::ChooseClosestSkeletons(DWORD trackIDs[TrackIDIndexCount]) { ZeroMemory(trackIDs, TrackIDIndexCount * sizeof(DWORD)); // Initial depth array with max posible value USHORT nearestDepth[TrackIDIndexCount] = { NUI_IMAGE_DEPTH_MAXIMUM, NUI_IMAGE_DEPTH_MAXIMUM }; for (int i = 0; i < NUI_SKELETON_COUNT; i++) { if (NUI_SKELETON_NOT_TRACKED != skeletonFrame.SkeletonData[i].eTrackingState) { LONG x, y; USHORT depth; // Transform skeleton coordinates to depth image NuiTransformSkeletonToDepthImage(skeletonFrame.SkeletonData[i].Position, &x, &y, &depth); // Compare depth to peviously found item if (depth < nearestDepth[FirstTrackID]) { // Move depth and track ID in first place to second place and assign with the new closer one nearestDepth[SecondTrackID] = nearestDepth[FirstTrackID]; nearestDepth[FirstTrackID] = depth; trackIDs[SecondTrackID] = trackIDs[FirstTrackID]; trackIDs[FirstTrackID] = skeletonFrame.SkeletonData[i].dwTrackingID; } else if (depth < nearestDepth[SecondTrackID]) { // Replace old depth and track ID in second place with the newly found closer one nearestDepth[SecondTrackID] = depth; trackIDs[SecondTrackID] = skeletonFrame.SkeletonData[i].dwTrackingID; } } } }
// Store the 2D (depth-image) and 3D (skeleton-space) positions of one skeleton
// joint into the tracking buffers.
// Returns 1 on success, -1 when the joint is untracked or its position is the
// all-zero placeholder.
int KinectImpl::setTrackingPos(NUI_SKELETON_DATA data, int skelId, int trackingId) {
  // Reject joints the SDK reports as not tracked at all.
  if (data.eSkeletonPositionTrackingState[skelId] == NUI_SKELETON_POSITION_NOT_TRACKED) {
    return -1;
  }

  // An exact (0,0,0) position is treated as "no data".
  const Vector4& joint = data.SkeletonPositions[skelId];
  if (joint.x == 0 && joint.y == 0 && joint.z == 0) {
    return -1;
  }

  // Project to 320x240 depth-image coordinates for the 2D tracking buffer.
  float projX, projY;
  NuiTransformSkeletonToDepthImage(joint, &projX, &projY, NUI_IMAGE_RESOLUTION_320x240 );
  trackingData[trackingId*2]   = (short)projX;
  trackingData[trackingId*2+1] = (short)projY;

  // Keep the raw skeleton-space coordinates as well.
  skeletonData[trackingId*3]   = joint.x;
  skeletonData[trackingId*3+1] = joint.y;
  skeletonData[trackingId*3+2] = joint.z;
  return 1;
}
/// <summary> /// Draw a circle to indicate a skeleton of which only position info is available /// </summary> /// <param name="skeletonData">Skeleton coordinates</param> /// <param name="imageRect">The rect which the color or depth stream image is streched to fit</param> void UKinect::drawPosition(const NUI_SKELETON_DATA& skeletonData) { LONG xd, yd, x, y; USHORT depth; NuiTransformSkeletonToDepthImage(skeletonData.Position, &xd, &yd, &depth, (NUI_IMAGE_RESOLUTION)depthResolution.as<int>()); NuiImageGetColorPixelCoordinatesFromDepthPixelAtResolution((NUI_IMAGE_RESOLUTION)colorResolution.as<int>(), (NUI_IMAGE_RESOLUTION)depthResolution.as<int>(), NULL, xd, yd, depth, &x, &y); circle(skeletonCVMat, Point(static_cast<int>(x), static_cast<int>(y)), 10, CV_RGB(0, 50, 200), 2); putText(skeletonCVMat, lexical_cast<string>(skeletonData.dwTrackingID), Point(x, y), 1, 2, CV_RGB(255, 255, 255), 2); }
// Project a skeleton-space position into video-image pixel coordinates using
// the device's configured video resolution.
Vec2i Kinect::getSkeletonVideoPos( const ci::Vec3f &position )
{
	// Pack the Cinder vector into the SDK's Vector4 (w = 1 for a point).
	Vector4 skeletonPoint;
	skeletonPoint.x = position.x;
	skeletonPoint.y = position.y;
	skeletonPoint.z = position.z;
	skeletonPoint.w = 1.0f;

	float projX = 0.0f;
	float projY = 0.0f;
	NuiTransformSkeletonToDepthImage( skeletonPoint, &projX, &projY, mDeviceOptions.getVideoResolution() );
	return Vec2i( (int32_t)projX, (int32_t)projY );
}
// Map a skeleton-space hand position onto window coordinates by projecting it
// through the 1280x960 depth-image space and rescaling to the window size.
ofVec3f KinectManager::handPositionToScreenPosition(Vector4 _pos)
{
	float depthX = 0.0f;
	float depthY = 0.0f;
	NuiTransformSkeletonToDepthImage(_pos, &depthX, &depthY, NUI_IMAGE_RESOLUTION_1280x960);

	// Scale from depth-image pixels to the current window size (z stays 0).
	ofVec3f screen = ofVec3f();
	screen.x = depthX / 1280 * ofGetWindowWidth();
	screen.y = depthY / 960 * ofGetWindowHeight();
	return screen;
}
/// <summary> /// Converts a skeleton point to screen space /// </summary> /// <param name="skeletonPoint">skeleton point to tranform</param> /// <param name="width">width (in pixels) of output buffer</param> /// <param name="height">height (in pixels) of output buffer</param> /// <returns>point in screen-space</returns> D2D1_POINT_2F CSkeletonBasics::SkeletonToScreen(Vector4 skeletonPoint, int width, int height) { LONG x, y; USHORT depth; // Calculate the skeleton's position on the screen // NuiTransformSkeletonToDepthImage returns coordinates in NUI_IMAGE_RESOLUTION_320x240 space NuiTransformSkeletonToDepthImage(skeletonPoint, &x, &y, &depth); float screenPointX = static_cast<float>(x * width) / cScreenWidth; float screenPointY = static_cast<float>(y * height) / cScreenHeight; return D2D1::Point2F(screenPointX, screenPointY); }
/// <summary>
/// Converts a skeleton point to screen space.
/// </summary>
/// <param name="skeletonPoint">skeleton point to transform</param>
/// <returns>pointer to a 2-element {x, y} array in screen space.
/// NOTE: points at function-local static storage, so the result is
/// overwritten by the next call and is not thread-safe.</returns>
GLfloat* SkeletonToScreen(Vector4 skeletonPoint)
{
    LONG x, y;
    USHORT depth;

    // Calculate the skeleton's position on the screen.
    // NuiTransformSkeletonToDepthImage returns coordinates in
    // NUI_IMAGE_RESOLUTION_320x240 space.
    NuiTransformSkeletonToDepthImage(skeletonPoint, &x, &y, &depth);

    float screenPointX = static_cast<float>(x * width) / 320;
    float screenPointY = static_cast<float>(y * height) / 240;

    // BUG FIX: the original returned the address of a non-static local array,
    // which dangles as soon as the function returns (undefined behavior).
    // Static storage keeps the pointer valid after return.
    static GLfloat pointIn2D[2];
    pointIn2D[0] = screenPointX;
    pointIn2D[1] = screenPointY;
    return pointIn2D;
}
// Map a skeleton-space point to color-image coordinates; falls back to the
// raw depth-image coordinates when the color registration fails.
QPointF MapToScreen(const Vector4 &point)
{
    //@to-do: It's not fixed
    const NUI_IMAGE_RESOLUTION resolution = NUI_IMAGE_RESOLUTION_640x480;

    // Project into depth-image space first.
    long x, y;
    ushort depth;
    NuiTransformSkeletonToDepthImage(point, &x, &y, &depth, resolution);

    // Try to register the depth pixel onto the color image; restore the depth
    // coordinates if the SDK call fails.
    const long depthX = x;
    const long depthY = y;
    if (FAILED(NuiImageGetColorPixelCoordinatesFromDepthPixelAtResolution(resolution, resolution, NULL, x, y, depth, &x, &y)))
    {
        x = depthX;
        y = depthY;
    }
    return QPointF(x, y);
}
// Convert a skeleton-space point into screen coordinates for an output buffer
// of the given size (note the parameter order: height first, then width).
point moduleCapture::SkeletonPointToScreen(Vector4 skeletonPoint, int height, int width)
{
    // Project into the fixed 320x240 depth-image space
    // (the 4-argument overload always uses NUI_IMAGE_RESOLUTION_320x240).
    LONG depthX, depthY;
    USHORT depth;
    NuiTransformSkeletonToDepthImage(skeletonPoint, &depthX, &depthY, &depth);

    // Rescale to the requested buffer size.
    point screen;
    screen.x = static_cast<float>(depthX * width) / 320;
    screen.y = static_cast<float>(depthY * height) / 240;
    return screen;
}
virtual void convertWorldToProjective( const uint32_t deviceID, const uint32_t coordinateCount, const _2RealVector3f* inWorld, _2RealVector3f* outProjective ) { checkDeviceRunning(deviceID, "_2RealImplOpenNI::convertWorldToProjective()" ); //fetching and writing data Vector4 in; USHORT depth; for( uint32_t i=0; i<coordinateCount; ++i ) { in.x = inWorld[i].x; in.y = inWorld[i].y; in.z = inWorld[i].z; LONG x,y; NuiTransformSkeletonToDepthImage( in, &x, &y, &depth ); outProjective[i].x = (float)x; outProjective[i].y = (float)y; outProjective[i].z = depth; } }
void UKinect::drawHand(DWORD ID, int joint, int _catch, bool pressed, bool active, double press) { LONG x, y, xd, yd; USHORT depth; Vector4 vPoint = {0,0,0,0}; vector<double> tmp = skeletonJointPosition(ID, joint); if (tmp.size()==0) return; vPoint.x = tmp[0]; vPoint.y = tmp[1]; vPoint.z = tmp[2]; NuiTransformSkeletonToDepthImage(vPoint, &xd, &yd, &depth, (NUI_IMAGE_RESOLUTION)depthResolution.as<int>()); hr = NuiImageGetColorPixelCoordinatesFromDepthPixelAtResolution((NUI_IMAGE_RESOLUTION)colorResolution.as<int>(), (NUI_IMAGE_RESOLUTION)depthResolution.as<int>(), NULL, xd, yd, depth, &x, &y); if (FAILED(hr)) return; if (press>2) press=2; press=press*125; circle(interCVMat,Point(static_cast<int>(x),static_cast<int>(y)),10,CV_RGB( static_cast<int>(press),static_cast<int>(press),static_cast<int>(press)),-1); if (1 == _catch) { if (pressed){ circle(interCVMat,Point(static_cast<int>(x),static_cast<int>(y)),15,CV_RGB( 0, 0, 255),-1); } else if (active) { circle(interCVMat,Point(static_cast<int>(x),static_cast<int>(y)),15,CV_RGB( 0, 255 ,0),-1); } else { circle(interCVMat,Point(static_cast<int>(x),static_cast<int>(y)),15,CV_RGB( 255, 0, 0),-1); } } else { if (pressed){ circle(interCVMat,Point(static_cast<int>(x),static_cast<int>(y)),15,CV_RGB( 0, 0, 255),3); } else if (active) { circle(interCVMat,Point(static_cast<int>(x),static_cast<int>(y)),15,CV_RGB( 0, 255, 0),3); } else { circle(interCVMat,Point(static_cast<int>(x),static_cast<int>(y)),15,CV_RGB( 255, 0, 0),3); } } }
// Copy every skeleton joint into the shared _points array under the mutex,
// normalized: x and y to [0, 1] over the depth image, z scaled from the packed
// depth value. Always returns true.
bool KinectHandler::handleJoints( NUI_SKELETON_DATA& skeletonData )
{
    POINT coordInDepth;
    USHORT depth = 0;
    OpenThreads::ScopedLock<OpenThreads::Mutex> lock(_mutex);
    for ( int jointIndex=0; jointIndex<NUI_SKELETON_POSITION_COUNT; ++jointIndex )
    {
        NuiTransformSkeletonToDepthImage( skeletonData.SkeletonPositions[jointIndex], &coordInDepth.x, &coordInDepth.y, &depth, KINECT_IMAGE_RESOLUTION );

        osg::Vec3& point = (*_points)[jointIndex];
        point.x() = (double)coordInDepth.x / KINECT_IMAGE_WIDTH;   // 0.0 - 1.0
        point.y() = (double)coordInDepth.y / KINECT_IMAGE_HEIGHT;  // 0.0 - 1.0

        // Drop the player-index bits, then map depth to [0, 1] assuming an
        // original range of 0 to 4000 millimeters.
        point.z() = (double)(depth >> NUI_IMAGE_PLAYER_INDEX_SHIFT);
        point.z() *= 0.00025;  // real value can't be less than 800, that is, 0.2
    }
    _points->dirty();
    return true;
}
/// <summary>
/// Return the color-image coordinates and packed depth of one joint of the
/// skeleton with the given tracking ID.
/// </summary>
/// <param name="ID">Skeleton tracking ID to look up</param>
/// <param name="joint">Joint index</param>
/// <returns>{x, y, depth} when the skeleton is tracked and the joint is at
/// least inferred; an empty vector otherwise.</returns>
vector<double> UKinect::skeletonJointPositionOnImage(DWORD ID, int joint) {
  vector<double> position;
  // BUG FIX: the original guarded on `&skeletonFrame != NULL`, which is always
  // true for a member object (taking its address can never yield NULL), so the
  // dead check was removed.
  for (int i = 0; i < NUI_SKELETON_COUNT; i++) {
    const NUI_SKELETON_DATA & skeletonData = skeletonFrame.SkeletonData[i];
    if ((skeletonData.eTrackingState == NUI_SKELETON_TRACKED) && (skeletonData.dwTrackingID == ID) && (skeletonData.eSkeletonPositionTrackingState[joint] >= NUI_SKELETON_POSITION_INFERRED)) {
      LONG x, y, xd, yd;
      USHORT depth;
      // Skeleton space -> depth image -> color image.
      NuiTransformSkeletonToDepthImage(skeletonData.SkeletonPositions[joint], &xd, &yd, &depth, (NUI_IMAGE_RESOLUTION)depthResolution.as<int>());
      NuiImageGetColorPixelCoordinatesFromDepthPixelAtResolution((NUI_IMAGE_RESOLUTION)colorResolution.as<int>(), (NUI_IMAGE_RESOLUTION)depthResolution.as<int>(), NULL, xd, yd, depth, &x, &y);
      position.push_back(x);
      position.push_back(y);
      position.push_back(static_cast<double>(depth));
    }
  }
  return position;
}
/// <summary> /// Draw a joint of the skeleton /// </summary> /// <param name="skeletonData">Skeleton coordinates</param> /// <param name="imageRect">The rect which the color or depth image is streched to fit</param> /// <param name="joint">Index for the joint to be drawn</param> void UKinect::drawJoint(const NUI_SKELETON_DATA& skeletonData, NUI_SKELETON_POSITION_INDEX joint) { NUI_SKELETON_POSITION_TRACKING_STATE state = skeletonData.eSkeletonPositionTrackingState[joint]; // Not tracked if (NUI_SKELETON_POSITION_NOT_TRACKED == state) { return; } LONG x, y, xd, yd; USHORT depth; NuiTransformSkeletonToDepthImage(skeletonData.SkeletonPositions[joint], &xd, &yd, &depth, (NUI_IMAGE_RESOLUTION)depthResolution.as<int>()); NuiImageGetColorPixelCoordinatesFromDepthPixelAtResolution((NUI_IMAGE_RESOLUTION)colorResolution.as<int>(), (NUI_IMAGE_RESOLUTION)depthResolution.as<int>(), NULL, xd, yd, depth, &x, &y); if (NUI_SKELETON_POSITION_TRACKED == state) { circle(skeletonCVMat, Point(static_cast<int>(x), static_cast<int>(y)), 7, CV_RGB(0, 200, 0), -1); } else { circle(skeletonCVMat, Point(static_cast<int>(x), static_cast<int>(y)), 6, CV_RGB(0, 200, 0), 2); } // draw "L" and "R" informations near shoulders if (joint == 4) putText(skeletonCVMat, lexical_cast<string>("L"), Point(static_cast<int>(x), static_cast<int>(y) - 10), 1, 2, CV_RGB(255, 255, 255), 2); if (joint == 8) putText(skeletonCVMat, lexical_cast<string>("R"), Point(static_cast<int>(x) - 13, static_cast<int>(y) - 10), 1, 2, CV_RGB(255, 255, 255), 2); }
/* The matlab mex function.
 * prhs[0]: pointer block of Kinect handles (depth width/height are read from
 *          fixed offsets 7 and 8).
 * plhs[0]: 200x6 double matrix; per joint row: [skeleton no., x, y, z,
 *          depth-image x, depth-image y]. Left zero-filled when no skeleton
 *          is tracked or no frame arrives within the timeout. */
void mexFunction( int nlhs, mxArray *plhs[], int nrhs, const mxArray *prhs[] )
{
    HRESULT hr;
    // Get pointer to Kinect handles
    unsigned __int64 *MXadress;
    if(nrhs==0) { mexErrMsgTxt("Give Pointer to Kinect as input"); }
    MXadress = (unsigned __int64*)mxGetData(prhs[0]);
    // Depth-image dimensions stashed at fixed offsets of the handle array.
    int depthwidth=(int)MXadress[7];
    int depthheight=(int)MXadress[8];
    // Initialize Output Skeleton Array (zero-filled by mxCreateNumericArray).
    int Jdimsc[2];
    Jdimsc[0]=200;
    Jdimsc[1]=6;
    plhs[0] = mxCreateNumericArray(2, Jdimsc, mxDOUBLE_CLASS, mxREAL);
    double *Pos;
    Pos = mxGetPr(plhs[0]);
    NUI_SKELETON_FRAME SkeletonFrame;
    // Wait for a Skeleton_Frame to arrive (200 ms timeout)
    hr = NuiSkeletonGetNextFrame( 200, &SkeletonFrame );
    if( FAILED( hr ) )
    {
        printf("Failed to get Frame\r\n");
    }
    else
    {
        // Check if there is a Skeleton found
        bool NoSkeletonFound = true;
        for( int i = 0 ; i < NUI_SKELETON_COUNT ; i++ )
        {
            if( SkeletonFrame.SkeletonData[i].eTrackingState == NUI_SKELETON_TRACKED ) { NoSkeletonFound = false; }
        }
        // Leave the zero-filled output untouched when nobody is tracked.
        if( NoSkeletonFound ) { return; }
        // Smooth the skeleton data (NULL = default smoothing parameters)
        NuiTransformSmooth(&SkeletonFrame,NULL);
        // Copy Skeleton points to output array (column-major MATLAB layout;
        // each tracked skeleton occupies the next NUI_SKELETON_POSITION_COUNT rows).
        int r=0;
        for( int i = 0 ; i < NUI_SKELETON_COUNT ; i++ )
        {
            if( SkeletonFrame.SkeletonData[i].eTrackingState == NUI_SKELETON_TRACKED )
            {
                NUI_SKELETON_DATA * pSkel = &SkeletonFrame.SkeletonData[i];
                int j;
                float fx=0,fy=0;
                for (j = 0; j < NUI_SKELETON_POSITION_COUNT; j++)
                {
                    Pos[j+r]=i+1;
                    Pos[j+r+Jdimsc[0]]=pSkel->SkeletonPositions[j].x;
                    Pos[j+r+Jdimsc[0]*2]=pSkel->SkeletonPositions[j].y;
                    Pos[j+r+Jdimsc[0]*3]=pSkel->SkeletonPositions[j].z;
                    // Scales fx/fy by the stored depth dimensions and rounds --
                    // this assumes the float overload returns normalized [0,1)
                    // coordinates (beta-SDK behavior); later SDKs return 320x240
                    // pixel coordinates. Confirm against the SDK version in use.
                    NuiTransformSkeletonToDepthImage( pSkel->SkeletonPositions[j], &fx, &fy );
                    Pos[j+r+Jdimsc[0]*4] = (double) ( fx * depthwidth + 0.5f );
                    Pos[j+r+Jdimsc[0]*5] = (double) ( fy * depthheight + 0.5f );
                }
                r+=NUI_SKELETON_POSITION_COUNT;
            }
        }
    }
}
int _tmain(int argc, _TCHAR* argv[]) { cv::setUseOptimized( true ); // Kinectのインスタンス生成、初期化 INuiSensor* pSensor; HRESULT hResult = S_OK; hResult = NuiCreateSensorByIndex( 0, &pSensor ); if( FAILED( hResult ) ){ std::cerr << "Error : NuiCreateSensorByIndex" << std::endl; return -1; } hResult = pSensor->NuiInitialize( NUI_INITIALIZE_FLAG_USES_COLOR | NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX | NUI_INITIALIZE_FLAG_USES_SKELETON ); if( FAILED( hResult ) ){ std::cerr << "Error : NuiInitialize" << std::endl; return -1; } // Colorストリーム HANDLE hColorEvent = INVALID_HANDLE_VALUE; HANDLE hColorHandle = INVALID_HANDLE_VALUE; hColorEvent = CreateEvent( nullptr, true, false, nullptr ); hResult = pSensor->NuiImageStreamOpen( NUI_IMAGE_TYPE_COLOR, NUI_IMAGE_RESOLUTION_640x480, 0, 2, hColorEvent, &hColorHandle ); if( FAILED( hResult ) ){ std::cerr << "Error : NuiImageStreamOpen( COLOR )" << std::endl; return -1; } // Depth&Playerストリーム HANDLE hDepthPlayerEvent = INVALID_HANDLE_VALUE; HANDLE hDepthPlayerHandle = INVALID_HANDLE_VALUE; hDepthPlayerEvent = CreateEvent( nullptr, true, false, nullptr ); hResult = pSensor->NuiImageStreamOpen( NUI_IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX, NUI_IMAGE_RESOLUTION_640x480, 0, 2, hDepthPlayerEvent, &hDepthPlayerHandle ); if( FAILED( hResult ) ){ std::cerr << "Error : NuiImageStreamOpen( DEPTH&PLAYER )" << std::endl; return -1; } // Skeletonストリーム HANDLE hSkeletonEvent = INVALID_HANDLE_VALUE; hSkeletonEvent = CreateEvent( nullptr, true, false, nullptr ); hResult = pSensor->NuiSkeletonTrackingEnable( hSkeletonEvent, 0 ); if( FAILED( hResult ) ){ std::cerr << "Error : NuiSkeletonTrackingEnable" << std::endl; return -1; } HANDLE hEvents[3] = { hColorEvent, hDepthPlayerEvent, hSkeletonEvent }; // カラーテーブル cv::Vec3b color[7]; color[0] = cv::Vec3b( 0, 0, 0 ); color[1] = cv::Vec3b( 255, 0, 0 ); color[2] = cv::Vec3b( 0, 255, 0 ); color[3] = cv::Vec3b( 0, 0, 255 ); color[4] = cv::Vec3b( 255, 255, 0 ); color[5] = cv::Vec3b( 255, 0, 255 ); color[6] = cv::Vec3b( 
0, 255, 255 ); cv::namedWindow( "Color" ); cv::namedWindow( "Depth" ); cv::namedWindow( "Player" ); cv::namedWindow( "Skeleton" ); while( 1 ){ // フレームの更新待ち ResetEvent( hColorEvent ); ResetEvent( hDepthPlayerEvent ); ResetEvent( hSkeletonEvent ); WaitForMultipleObjects( ARRAYSIZE( hEvents ), hEvents, true, INFINITE ); // Colorカメラからフレームを取得 NUI_IMAGE_FRAME pColorImageFrame = { 0 }; hResult = pSensor->NuiImageStreamGetNextFrame( hColorHandle, 0, &pColorImageFrame ); if( FAILED( hResult ) ){ std::cerr << "Error : NuiImageStreamGetNextFrame( COLOR )" << std::endl; return -1; } // Depthセンサーからフレームを取得 NUI_IMAGE_FRAME pDepthPlayerImageFrame = { 0 }; hResult = pSensor->NuiImageStreamGetNextFrame( hDepthPlayerHandle, 0, &pDepthPlayerImageFrame ); if( FAILED( hResult ) ){ std::cerr << "Error : NuiImageStreamGetNextFrame( DEPTH&PLAYER )" << std::endl; return -1; } // Skeletonフレームを取得 NUI_SKELETON_FRAME pSkeletonFrame = { 0 }; hResult = pSensor->NuiSkeletonGetNextFrame( 0, &pSkeletonFrame ); if( FAILED( hResult ) ){ std::cout << "Error : NuiSkeletonGetNextFrame" << std::endl; return -1; } // Color画像データの取得 INuiFrameTexture* pColorFrameTexture = pColorImageFrame.pFrameTexture; NUI_LOCKED_RECT sColorLockedRect; pColorFrameTexture->LockRect( 0, &sColorLockedRect, nullptr, 0 ); // Depthデータの取得 INuiFrameTexture* pDepthPlayerFrameTexture = pDepthPlayerImageFrame.pFrameTexture; NUI_LOCKED_RECT sDepthPlayerLockedRect; pDepthPlayerFrameTexture->LockRect( 0, &sDepthPlayerLockedRect, nullptr, 0 ); // 表示 cv::Mat colorMat( 480, 640, CV_8UC4, reinterpret_cast<uchar*>( sColorLockedRect.pBits ) ); LONG registX = 0; LONG registY = 0; ushort* pBuffer = reinterpret_cast<ushort*>( sDepthPlayerLockedRect.pBits ); cv::Mat bufferMat = cv::Mat::zeros( 480, 640, CV_16UC1 ); cv::Mat playerMat = cv::Mat::zeros( 480, 640, CV_8UC3 ); for( int y = 0; y < 480; y++ ){ for( int x = 0; x < 640; x++ ){ pSensor->NuiImageGetColorPixelCoordinatesFromDepthPixelAtResolution( NUI_IMAGE_RESOLUTION_640x480, 
NUI_IMAGE_RESOLUTION_640x480, nullptr, x, y, *pBuffer, ®istX, ®istY ); if( ( registX >= 0 ) && ( registX < 640 ) && ( registY >= 0 ) && ( registY < 480 ) ){ bufferMat.at<ushort>( registY, registX ) = *pBuffer & 0xFFF8; playerMat.at<cv::Vec3b>( registY, registX ) = color[*pBuffer & 0x7]; } pBuffer++; } } cv::Mat depthMat( 480, 640, CV_8UC1 ); bufferMat.convertTo( depthMat, CV_8UC3, -255.0f / NUI_IMAGE_DEPTH_MAXIMUM, 255.0f ); cv::Mat skeletonMat = cv::Mat::zeros( 480, 640, CV_8UC3 ); cv::Point2f point; for( int count = 0; count < NUI_SKELETON_COUNT; count++ ){ NUI_SKELETON_DATA skeleton = pSkeletonFrame.SkeletonData[count]; if( skeleton.eTrackingState == NUI_SKELETON_TRACKED ){ for( int position = 0; position < NUI_SKELETON_POSITION_COUNT; position++ ){ NuiTransformSkeletonToDepthImage( skeleton.SkeletonPositions[position], &point.x, &point.y, NUI_IMAGE_RESOLUTION_640x480 ); cv::circle( skeletonMat, point, 10, static_cast<cv::Scalar>( color[count + 1] ), -1, CV_AA ); } } } cv::imshow( "Color", colorMat ); cv::imshow( "Depth", depthMat ); cv::imshow( "Player", playerMat ); cv::imshow( "Skeleton", skeletonMat ); // フレームの解放 pColorFrameTexture->UnlockRect( 0 ); pDepthPlayerFrameTexture->UnlockRect( 0 ); pSensor->NuiImageStreamReleaseFrame( hColorHandle, &pColorImageFrame ); pSensor->NuiImageStreamReleaseFrame( hDepthPlayerHandle, &pDepthPlayerImageFrame ); // ループの終了判定(Escキー) if( cv::waitKey( 30 ) == VK_ESCAPE ){ break; } } // Kinectの終了処理 pSensor->NuiShutdown(); pSensor->NuiSkeletonTrackingDisable(); CloseHandle( hColorEvent ); CloseHandle( hDepthPlayerEvent ); CloseHandle( hSkeletonEvent ); CloseHandle( hColorHandle ); CloseHandle( hDepthPlayerHandle ); cv::destroyAllWindows(); return 0; }
//-------------------------------------------------------------------
// Nui_GotSkeletonAlert
//
// Handle new skeleton data: grab the next skeleton frame, smooth it,
// pick the skeleton nearest to the sensor, and draw it.
//-------------------------------------------------------------------
void SkeletalTracker::Nui_GotSkeletonAlert( )
{
    NUI_SKELETON_FRAME SkeletonFrame = {0};

    bool bFoundSkeleton = false;

    if ( SUCCEEDED(NuiSkeletonGetNextFrame( 0, &SkeletonFrame )) )
//    if ( SUCCEEDED(m_pNuiSensor->NuiSkeletonGetNextFrame( 0, &SkeletonFrame )) )
    {
        for ( int i = 0 ; i < NUI_SKELETON_COUNT ; i++ )
        {
            // NOTE(review): the tracking-state filter below is commented out,
            // so bFoundSkeleton is set on every iteration whenever a frame
            // arrives -- this loop currently always reports "found".
//            if( SkeletonFrame.SkeletonData[i].eTrackingState == NUI_SKELETON_TRACKED ||
//                (SkeletonFrame.SkeletonData[i].eTrackingState == NUI_SKELETON_POSITION_ONLY ))
            {
                bFoundSkeleton = true;
            }
        }
    }

    // no skeletons!
    if( !bFoundSkeleton )
    {
        return;
    }

    // smooth out the skeleton data with custom (fairly aggressive) parameters
    NUI_TRANSFORM_SMOOTH_PARAMETERS smoothParams;
    smoothParams.fSmoothing = 0.6f;
    smoothParams.fCorrection = 0.3f;
    smoothParams.fPrediction = 0.6f;
    smoothParams.fJitterRadius = 0.6f;
    smoothParams.fMaxDeviationRadius = 0.6f;
    HRESULT hr = NuiTransformSmooth(&SkeletonFrame,&smoothParams);
    if ( FAILED(hr) )
    {
        return;
    }

    // we found a skeleton; record ID changes and select the nearest skeleton.
    bool bSkeletonIdsChanged = false;
    // NOTE(review): despite the name, currentMaxDepth tracks the SMALLEST
    // depth seen so far (it starts at the maximum and shrinks), which selects
    // the skeleton closest to the sensor.
    USHORT currentMaxDepth = NUI_IMAGE_DEPTH_MAXIMUM;
    int selectedSkeleton = -1;
    for ( int i = 0 ; i < NUI_SKELETON_COUNT; i++ )
    {
        if ( m_SkeletonIds[i] != SkeletonFrame.SkeletonData[i].dwTrackingID )
        {
            m_SkeletonIds[i] = SkeletonFrame.SkeletonData[i].dwTrackingID;
            bSkeletonIdsChanged = true;
        }

        // Show skeleton only if it is tracked, and the center-shoulder joint is at least inferred.
        if ( SkeletonFrame.SkeletonData[i].eTrackingState == NUI_SKELETON_TRACKED)
        {
            LONG x, y;
            USHORT depth;

            // Transform skeleton coordinates to depth image
            NuiTransformSkeletonToDepthImage(SkeletonFrame.SkeletonData[i].Position, &x, &y, &depth);
            if (depth < currentMaxDepth)
            {
                selectedSkeleton = i;
                currentMaxDepth = depth;
            }
        }
        else if ( SkeletonFrame.SkeletonData[i].eTrackingState == NUI_SKELETON_POSITION_ONLY )
        {
            // Position-only skeletons are ignored here.
            // OutputDebugString( L"Skeleton position only\r\n" );
        }
    }

    // Draw only the nearest tracked skeleton, if any.
    if (selectedSkeleton > -1)
    {
        Nui_DrawSkeleton( &SkeletonFrame.SkeletonData[selectedSkeleton], NULL, selectedSkeleton );
    }
}
// Detect the hand position for each user blob: the hand is estimated as the
// centroid of the silhouette points lying on a sphere of radius
// SENCIG_CIRCLE_RADIUS centered on the user's head. Updates
// userData[blobID].handInfo in place and refreshes preUserData at the end.
// <param name="blobs">Connected-component blobs, one per user area</param>
void MultiCursorAppCpp::detectHandPosition(CvBlobs blobs)
{
	FLOAT offset = 0.03f;  // tolerance band around the sphere surface (units follow point3fMatrix -- confirm)
	INT blobID = 0;
	for (CvBlobs::const_iterator it = blobs.begin(); it != blobs.end(); ++it)
	{
		int numIntersectionPoints = 0;
		// Accumulator for the average of all intersection points (w = 1 for a point).
		Vector4 handPosition;
		handPosition.w = 1;
		handPosition.x = 0.0f;
		handPosition.y = 0.0f;
		handPosition.z = 0.0f;
		// Head position (camera space) of the user this blob belongs to.
		// NOTE(review): indexes userData by the running blobID -- assumes blob
		// iteration order matches userData order; verify against the producer.
		Point3f center3f = Point3_<FLOAT>(userData[blobID].headInfo.cameraPoint.x, userData[blobID].headInfo.cameraPoint.y, userData[blobID].headInfo.cameraPoint.z);
		// Scan the user's bounding box
		for (int y = it->second->miny; y <= it->second->maxy; y++)
		{
			for (int x = it->second->minx; x <= it->second->maxx; x++)
			{
				// Horizontal (x/y only) distance from the head; the z term is
				// deliberately commented out.
				float length = sqrt( pow(center3f.x - point3fMatrix.ptr<float>(y, x)[0], 2) + pow(center3f.y - point3fMatrix.ptr<float>(y, x)[1], 2)
					//+ pow(center3f.z - point3fMatrix.ptr<float>(y, x)[2], 2)
					);
				// Define the intersection point of the sphere which its center is head and the hand as the hand position
				if (*heightMatrix.ptr<USHORT>(y, x) > userData[blobID].headInfo.height - HEAD_LENGTH - SHOULDER_LENGTH // point higher than the shoulder?
					&& 0 < point3fMatrix.ptr<float>(y, x)[2]*1000 && point3fMatrix.ptr<float>(y, x)[2] * 1000 < KINECT_HEIGHT // within the Kinect's detectable range?
					&& it->first == labelMat.at<unsigned long>(y, x) // only search within the same blob
					) // Don't include desk
				{
					if (SENCIG_CIRCLE_RADIUS - offset < length )
					{ // Is the current point on (or beyond) the sphere surface?
						handPosition.x += point3fMatrix.ptr<float>(y, x)[0];
						handPosition.y += point3fMatrix.ptr<float>(y, x)[1];
						handPosition.z += point3fMatrix.ptr<float>(y, x)[2];
						circle(userAreaMat, Point(x, y), 2, Scalar(255, 255, 0), -1);
						numIntersectionPoints++;
					}
					else if (length > SENCIG_CIRCLE_RADIUS)
					{
#ifdef TRACK_GESTURE_BY_AREA
						++userData[blobID].handInfo.area; // hand area measured as a point count
#ifdef USE_KINECT_V1
						// NOTE(review): handPosition is still the running sum
						// here (not yet averaged) -- the drawn point may not be
						// meaningful; confirm intent.
						LONG handPositionX2d;
						LONG handPositionY2d;
						USHORT dis;
						NuiTransformSkeletonToDepthImage(handPosition, &handPositionX2d, &handPositionY2d, &dis, KINECT_RESOLUTION);
						circle(userAreaMat, Point(handPositionX2d, handPositionY2d), 7, Scalar(0, 200, 0), 3);
#else
						// Kinect v2 path: mark the pixel directly in the user-area image.
						DepthSpacePoint depthPoint;
						CameraSpacePoint cameraPoint;
						cameraPoint.X = point3fMatrix.ptr<float>(y, x)[0];
						cameraPoint.Y = point3fMatrix.ptr<float>(y, x)[1];
						cameraPoint.Z = point3fMatrix.ptr<float>(y, x)[2];
						kinectBasics.GetMapper()->MapCameraPointToDepthSpace(cameraPoint, &depthPoint);
						int index = ((y * kinectBasics.widthDepth) + x) * 3;
						UCHAR* dataDepth = &userAreaMat.data[index];
						dataDepth[0] = 0;
						dataDepth[1] = 200;
						dataDepth[2] = 255;
						//circle(userAreaMat, Point(depthPoint.X, depthPoint.Y), 1, Scalar(0, 255, 255), 3);
#endif
#endif
					}
				}
			}
		}
		if (numIntersectionPoints > 0)
		{
			// Average the intersection points to get the hand position.
			userData[blobID].handInfo.isTracked = true;
			handPosition.x /= numIntersectionPoints;
			handPosition.y /= numIntersectionPoints;
			handPosition.z /= numIntersectionPoints;
			userData[blobID].handInfo.cameraPoint.x = handPosition.x;
			userData[blobID].handInfo.cameraPoint.y = handPosition.y;
			userData[blobID].handInfo.cameraPoint.z = handPosition.z;
			// Show the hand positions on the depth image
#ifdef USE_KINECT_V1
			LONG handPositionX2d;
			LONG handPositionY2d;
			USHORT dis;
			NuiTransformSkeletonToDepthImage(handPosition, &handPositionX2d, &handPositionY2d, &dis, KINECT_RESOLUTION);
			circle(userAreaMat, Point(handPositionX2d, handPositionY2d), 7, Scalar(0, 200, 0), 3);
#else
			DepthSpacePoint depthPoint;
			CameraSpacePoint cameraPoint;
			cameraPoint.X = handPosition.x;
			cameraPoint.Y = handPosition.y;
			cameraPoint.Z = handPosition.z;
			kinectBasics.GetMapper()->MapCameraPointToDepthSpace(cameraPoint, &depthPoint);
			circle(userAreaMat, Point(depthPoint.X, depthPoint.Y), 7, Scalar(0, 200, 0), 3);
#endif
		}
		else
		{
			// No intersection found: mark the hand as lost.
			userData[blobID].handInfo.isTracked = false;
			userData[blobID].handInfo.cameraPoint.x = 0.0f;
			userData[blobID].handInfo.cameraPoint.y = 0.0f;
			userData[blobID].handInfo.cameraPoint.z = 0.0f;
		}
#ifdef TRACK_GESTURE_BY_AREA
		// Check hand open/close state via the measured area,
		// normalized by the hand's distance from the sensor.
		if (userData[blobID].handInfo.area != 0)
		{
			float distance = sqrt(pow(handPosition.x, 2) + pow(handPosition.y, 2) + pow(handPosition.z, 2));
			userData[blobID].handInfo.area /= distance;
		}
		// cout << "area: " << userData[blobID].handInfo.area << endl;
#endif
		blobID++;
	}

	// Replace preUserData by current userData for the next frame.
	preUserData.clear();
	preUserData = userData;
	for (vector<UserData>::iterator p = preUserData.begin(); p != preUserData.end(); p++)
	{
		p->isDataFound = false; // reset the per-frame match flag
	}
}
void KinectImpl::drawNuiSkeleton(int width, int height, int playerID) { int scaleX = width; int scaleY = height; long x=0,y=0; unsigned short depth=0; float fx=0,fy=0; long cx=0,cy=0; float display_posf[NUI_SKELETON_POSITION_COUNT][2]; for (int i = 0; i < NUI_SKELETON_POSITION_COUNT; i++) { switch(showedTextureIndex){ case IMAGE_TEXTURE: NuiTransformSkeletonToDepthImage( skels[playerID][i], &x, &y, &depth); NuiImageGetColorPixelCoordinatesFromDepthPixel(NUI_IMAGE_RESOLUTION_640x480, NULL, x, y, depth, &cx, &cy); display_posf[i][0] = 1.0f - (cx / 640.0f) * 2.0f; display_posf[i][1] = 1.0f - (cy / 480.0f) * 2.0f; break; case DEPTH_TEXTURE: NuiTransformSkeletonToDepthImage( skels[playerID][i], &fx, &fy); display_posf[i][0] = 1.0f - fx / 320.0f * 2.0f; display_posf[i][1] = 1.0f - fy / 240.0f * 2.0f; break; default: break; } } glColor3ub(255, 255, 0); glLineWidth(6); glBegin(GL_LINE_STRIP); glVertex2f( display_posf[NUI_SKELETON_POSITION_HIP_CENTER][0], display_posf[NUI_SKELETON_POSITION_HIP_CENTER][1]); glVertex2f( display_posf[NUI_SKELETON_POSITION_SPINE][0], display_posf[NUI_SKELETON_POSITION_SPINE][1]); glVertex2f( display_posf[NUI_SKELETON_POSITION_SHOULDER_CENTER][0], display_posf[NUI_SKELETON_POSITION_SHOULDER_CENTER][1]); glVertex2f( display_posf[NUI_SKELETON_POSITION_HEAD][0], display_posf[NUI_SKELETON_POSITION_HEAD][1]); glEnd(); glBegin(GL_LINE_STRIP); glVertex2f( display_posf[NUI_SKELETON_POSITION_SHOULDER_CENTER][0], display_posf[NUI_SKELETON_POSITION_SHOULDER_CENTER][1]); glVertex2f( display_posf[NUI_SKELETON_POSITION_SHOULDER_LEFT][0], display_posf[NUI_SKELETON_POSITION_SHOULDER_LEFT][1]); glVertex2f( display_posf[NUI_SKELETON_POSITION_ELBOW_LEFT][0], display_posf[NUI_SKELETON_POSITION_ELBOW_LEFT][1]); glVertex2f( display_posf[NUI_SKELETON_POSITION_WRIST_LEFT][0], display_posf[NUI_SKELETON_POSITION_WRIST_LEFT][1]); glVertex2f( display_posf[NUI_SKELETON_POSITION_HAND_LEFT][0], display_posf[NUI_SKELETON_POSITION_HAND_LEFT][1]); glEnd(); 
glBegin(GL_LINE_STRIP); glVertex2f( display_posf[NUI_SKELETON_POSITION_SHOULDER_CENTER][0], display_posf[NUI_SKELETON_POSITION_SHOULDER_CENTER][1]); glVertex2f( display_posf[NUI_SKELETON_POSITION_SHOULDER_RIGHT][0], display_posf[NUI_SKELETON_POSITION_SHOULDER_RIGHT][1]); glVertex2f( display_posf[NUI_SKELETON_POSITION_ELBOW_RIGHT][0], display_posf[NUI_SKELETON_POSITION_ELBOW_RIGHT][1]); glVertex2f( display_posf[NUI_SKELETON_POSITION_WRIST_RIGHT][0], display_posf[NUI_SKELETON_POSITION_WRIST_RIGHT][1]); glVertex2f( display_posf[NUI_SKELETON_POSITION_HAND_RIGHT][0], display_posf[NUI_SKELETON_POSITION_HAND_RIGHT][1]); glEnd(); glBegin(GL_LINE_STRIP); glVertex2f( display_posf[NUI_SKELETON_POSITION_HIP_CENTER][0], display_posf[NUI_SKELETON_POSITION_HIP_CENTER][1]); glVertex2f( display_posf[NUI_SKELETON_POSITION_HIP_LEFT][0], display_posf[NUI_SKELETON_POSITION_HIP_LEFT][1]); glVertex2f( display_posf[NUI_SKELETON_POSITION_KNEE_LEFT][0], display_posf[NUI_SKELETON_POSITION_KNEE_LEFT][1]); glVertex2f( display_posf[NUI_SKELETON_POSITION_ANKLE_LEFT][0], display_posf[NUI_SKELETON_POSITION_ANKLE_LEFT][1]); glVertex2f( display_posf[NUI_SKELETON_POSITION_FOOT_LEFT][0], display_posf[NUI_SKELETON_POSITION_FOOT_LEFT][1]); glEnd(); glBegin(GL_LINE_STRIP); glVertex2f( display_posf[NUI_SKELETON_POSITION_HIP_CENTER][0], display_posf[NUI_SKELETON_POSITION_HIP_CENTER][1]); glVertex2f( display_posf[NUI_SKELETON_POSITION_HIP_RIGHT][0], display_posf[NUI_SKELETON_POSITION_HIP_RIGHT][1]); glVertex2f( display_posf[NUI_SKELETON_POSITION_KNEE_RIGHT][0], display_posf[NUI_SKELETON_POSITION_KNEE_RIGHT][1]); glVertex2f( display_posf[NUI_SKELETON_POSITION_ANKLE_RIGHT][0], display_posf[NUI_SKELETON_POSITION_ANKLE_RIGHT][1]); glVertex2f( display_posf[NUI_SKELETON_POSITION_FOOT_RIGHT][0], display_posf[NUI_SKELETON_POSITION_FOOT_RIGHT][1]); glEnd(); glColor3ub(0, 0, 0); }
/// <summary>
/// Derive a set of body measurements (in skeleton-space units) for the
/// tracked player and store them into bodyMeasurements[]. Joint positions
/// are projected into the depth image; the 8-bit player-mask image (uImage)
/// is walked pixel-by-pixel to extend from a joint to the silhouette border,
/// and the 16-bit depth image (dImage) supplies the depth at that border so
/// it can be back-projected with NuiTransformDepthImageToSkeleton.
/// </summary>
/// <param name="mNui">Controller holding the joint positions (m_Points)
/// and the current depth resolution (m_DepthResolution).</param>
void measureBody(NUI_Controller* mNui) {
    //Head Width Measurement
    NUI_Vector4 head, neck, lShoulder, rShoulder, lFoot, rFoot, lHip, rHip, lElbow, rElbow, lHand, rHand, torso;
    head = mNui->m_Points[NUI_SKELETON_POSITION_HEAD];
    LONG projHeadX, projHeadY;
    USHORT depth;
    NuiTransformSkeletonToDepthImage(head, &projHeadX, &projHeadY, &depth, mNui->m_DepthResolution);
    // Pointers to the head pixel in the player mask and in the depth image
    // (projHeadX*2 because dImage holds 16-bit samples).
    UCHAR* headPtr = (UCHAR*)(uImage->imageData + projHeadY * uImage->widthStep + projHeadX);
    USHORT* headDepthPtr = (USHORT*)(dImage->imageData + projHeadY * dImage->widthStep + projHeadX * 2);
    UCHAR* iPtr = headPtr;
    LONG leftX = projHeadX;
    LONG rightX = projHeadX;
    LONG leftStep = 0;
    LONG rightStep = 0;
    //Extend the line horizontally until it reaches the borders of the head.
    while (*(--iPtr) > 0 && 0 < leftX) {
        leftX--;
        leftStep++;
    }
    iPtr = headPtr;
    while (*(++iPtr) > 0 && rightX < (m_Width - 1)) {
        rightX++;
        rightStep++;
    }
    NUI_Vector4 leftHead = NuiTransformDepthImageToSkeleton(leftX, projHeadY, *(headDepthPtr - leftStep), mNui->m_DepthResolution);
    // FIX: sample the depth under the RIGHT border with rightStep (was leftStep).
    NUI_Vector4 rightHead = NuiTransformDepthImageToSkeleton(rightX, projHeadY, *(headDepthPtr + rightStep), mNui->m_DepthResolution);
    // FIX: head width is the difference of the two x coordinates
    // (was rightHead.x - leftHead.y, mixing axes).
    // NOTE(review): abs() on float operands relies on a float overload being
    // in scope; if only the int abs() is visible this truncates — verify
    // includes or switch to fabs().
    bodyMeasurements[HEAD_WIDTH] = abs(rightHead.x - leftHead.x);

    //Head Height Measurement
    neck = mNui->m_Points[NUI_SKELETON_POSITION_SHOULDER_CENTER];
    bodyMeasurements[HEAD_HEIGHT] = abs(head.y - neck.y);

    //Body Height Measurement
    lFoot = mNui->m_Points[NUI_SKELETON_POSITION_FOOT_LEFT];
    rFoot = mNui->m_Points[NUI_SKELETON_POSITION_FOOT_RIGHT];
    float lowPointY = (lFoot.y + rFoot.y) / 2;
    iPtr = headPtr - uImage->widthStep; //Initialize the pointer 1 pixel above, since decrement will take place after comparison.
    LONG topY = projHeadY; // FIX: was read uninitialized; must track the scanned row so the depth offset (topStep) stays consistent.
    int topStep = 0;
    // Walk upward through the mask to the top of the head/silhouette.
    while (*iPtr > 0 && (topY > 0)) {
        iPtr -= uImage->widthStep;
        topY--;
        topStep++;
    }
    NUI_Vector4 topPoint = NuiTransformDepthImageToSkeleton(projHeadX, topY, *(headDepthPtr - topStep * dImage->widthStep / 2), mNui->m_DepthResolution);
    bodyMeasurements[BODY_HEIGHT] = abs(topPoint.y - lowPointY);

    //Hip Height Measurement
    lHip = mNui->m_Points[NUI_SKELETON_POSITION_HIP_LEFT];
    rHip = mNui->m_Points[NUI_SKELETON_POSITION_HIP_RIGHT];
    // FIX: second term compared the right hip to the LEFT HIP (rHip.y - lHip.y);
    // each hip must be measured against its own foot.
    bodyMeasurements[HIP_HEIGHT] = (abs(lHip.y - lFoot.y) + abs(rHip.y - rFoot.y)) / 2;

    //Elbow-Fingertip Measurement
    lElbow = mNui->m_Points[NUI_SKELETON_POSITION_ELBOW_LEFT];
    rElbow = mNui->m_Points[NUI_SKELETON_POSITION_ELBOW_RIGHT];
    lHand = mNui->m_Points[NUI_SKELETON_POSITION_HAND_LEFT];
    rHand = mNui->m_Points[NUI_SKELETON_POSITION_HAND_RIGHT];
    LONG lHandUpX, rHandUpX, lHandDownX, rHandDownX, lHandUpY, lHandDownY, rHandUpY, rHandDownY;
    NuiTransformSkeletonToDepthImage(lHand, &lHandUpX, &lHandUpY, &depth, mNui->m_DepthResolution);
    NuiTransformSkeletonToDepthImage(rHand, &rHandUpX, &rHandUpY, &depth, mNui->m_DepthResolution);
    NuiTransformSkeletonToDepthImage(lElbow, &lHandDownX, &lHandDownY, &depth, mNui->m_DepthResolution);
    NuiTransformSkeletonToDepthImage(rElbow, &rHandDownX, &rHandDownY, &depth, mNui->m_DepthResolution);
    UCHAR* lHandPtr = (UCHAR*)(uImage->imageData + (int)(lHandUpY - 1) * uImage->widthStep + (int)lHandUpX); //Initialize the pointer 1 pixel above, since decrement will take place after comparison.
    while (*lHandPtr > 0 && (lHandUpY > 0)) {
        lHandPtr -= uImage->widthStep;
        lHandUpY--;
    } //Extend the line vertically until it reaches the borders of the arm.
    UCHAR* lElbowPtr = (UCHAR*)(uImage->imageData + (int)(lHandDownY + 1) * uImage->widthStep + (int)lHandDownX); //Initialize the pointer 1 pixel below, since increment will take place after comparison.
    // FIX: vertical scan is bounded by the image HEIGHT (was m_Width),
    // matching the right-arm scan below.
    while (*lElbowPtr > 0 && (lHandDownY < (m_Height - 1))) {
        lElbowPtr += uImage->widthStep;
        lHandDownY++;
    } //Extend the line vertically until it reaches the borders of the arm.
    UCHAR* rHandPtr = (UCHAR*)(uImage->imageData + (int)(rHandUpY - 1) * uImage->widthStep + (int)rHandUpX); //Initialize the pointer 1 pixel above, since decrement will take place after comparison.
    while (*rHandPtr > 0 && (rHandUpY > 0)) {
        rHandPtr -= uImage->widthStep;
        rHandUpY--;
    } //Extend the line vertically until it reaches the borders of the arm.
    UCHAR* rElbowPtr = (UCHAR*)(uImage->imageData + (int)(rHandDownY + 1) * uImage->widthStep + (int)rHandDownX); //Initialize the pointer 1 pixel below, since increment will take place after comparison.
    while (*rElbowPtr > 0 && (rHandDownY < (m_Height - 1))) {
        rElbowPtr += uImage->widthStep;
        rHandDownY++;
    } //Extend the line vertically until it reaches the borders of the arm.
    USHORT* depthPtr = (USHORT*)(dImage->imageData);
    NUI_Vector4 leftArmDown = NuiTransformDepthImageToSkeleton(lHandDownX, lHandDownY, *(depthPtr + lHandDownY * dImage->widthStep / 2 + lHandDownX), mNui->m_DepthResolution);
    NUI_Vector4 leftArmUp = NuiTransformDepthImageToSkeleton(lHandUpX, lHandUpY, *(depthPtr + lHandUpY * dImage->widthStep / 2 + lHandUpX), mNui->m_DepthResolution);
    NUI_Vector4 rightArmDown = NuiTransformDepthImageToSkeleton(rHandDownX, rHandDownY, *(depthPtr + rHandDownY * dImage->widthStep / 2 + rHandDownX), mNui->m_DepthResolution);
    NUI_Vector4 rightArmUp = NuiTransformDepthImageToSkeleton(rHandUpX, rHandUpY, *(depthPtr + rHandUpY * dImage->widthStep / 2 + rHandUpX), mNui->m_DepthResolution);
    bodyMeasurements[ELBOW_FINGERTIP] = (abs(rightArmUp.y - rightArmDown.y) + abs(leftArmUp.y - leftArmDown.y)) / 2;

    //Wrist to Fingertip Measurement
    // FIX: parenthesize the sum before dividing by 2 — previously only the
    // right-hand term was halved (operator precedence bug).
    bodyMeasurements[WRIST_FINGERTIP] = (abs(leftArmUp.y - mNui->m_Points[NUI_SKELETON_POSITION_WRIST_LEFT].y) + abs(rightArmUp.y - mNui->m_Points[NUI_SKELETON_POSITION_WRIST_RIGHT].y)) / 2;

    //Shoulder Width Measurement
    lShoulder = mNui->m_Points[NUI_SKELETON_POSITION_SHOULDER_LEFT];
    rShoulder = mNui->m_Points[NUI_SKELETON_POSITION_SHOULDER_RIGHT];
    bodyMeasurements[SHOULDER_WIDTH] = abs(rShoulder.x - lShoulder.x);

    //Hip Width Measurement
    LONG lHipX, lHipY, rHipX, rHipY;
    NuiTransformSkeletonToDepthImage(lHip, &lHipX, &lHipY, &depth, mNui->m_DepthResolution);
    NuiTransformSkeletonToDepthImage(rHip, &rHipX, &rHipY, &depth, mNui->m_DepthResolution);
    UCHAR* lEndPtr = (UCHAR*)(uImage->imageData + (int)lHipY * uImage->widthStep + (int)lHipX);
    leftX = lHipX;
    leftStep = 0;
    while (*(--lEndPtr) > 0 && leftX > 0) {
        leftX--; //Extend the line horizontally until it reaches the borders of the hips.
        leftStep++;
    }
    NUI_Vector4 leftHipEnd = NuiTransformDepthImageToSkeleton(leftX, lHipY, *((USHORT*)(dImage->imageData + (int)lHipY * dImage->widthStep + (int)leftX * 2)), mNui->m_DepthResolution);
    UCHAR* rEndPtr = (UCHAR*)(uImage->imageData + (int)rHipY * uImage->widthStep + (int)rHipX);
    rightX = rHipX;
    rightStep = 0;
    while (*(++rEndPtr) > 0 && rightX < (m_Width - 1)) {
        rightX++; //Extend the line horizontally until it reaches the borders of the hips.
        rightStep++;
    }
    NUI_Vector4 rightHipEnd = NuiTransformDepthImageToSkeleton(rightX, rHipY, *((USHORT*)(dImage->imageData + (int)rHipY * dImage->widthStep + (int)rightX * 2)), mNui->m_DepthResolution);
    bodyMeasurements[HIP_WIDTH] = abs(rightHipEnd.x - leftHipEnd.x);

    //Torso Height Measurement
    torso = mNui->m_Points[NUI_SKELETON_POSITION_SPINE];
    bodyMeasurements[TORSO_HEIGHT] = abs(torso.y - lowPointY);
}
void drawNuiSkeleton(int playerID) { int scaleX = DEFAULT_WIDTH; int scaleY = DEFAULT_HEIGHT; long x=0,y=0; unsigned short depth=0; float fx=0,fy=0; long cx=0,cy=0; int display_pos[NUI_SKELETON_POSITION_COUNT][2]; for (int i = 0; i < NUI_SKELETON_POSITION_COUNT; i++) { // Overlay on depth image //NuiTransformSkeletonToDepthImage( skels[playerID][i], &fx, &fy); //display_pos[i][0] = (int) ( fx / 320.0 * DEFAULT_WIDTH); //display_pos[i][1] = (int) ( fy / 240.0 * DEFAULT_HEIGHT); // Overlay on color image NuiTransformSkeletonToDepthImage( skels[playerID][i], &x, &y, &depth); NuiImageGetColorPixelCoordinatesFromDepthPixel(NUI_IMAGE_RESOLUTION_640x480, NULL, x, y, depth, &cx, &cy); display_pos[i][0] = (int) cx; display_pos[i][1] = (int) cy; } glColor3ub(255, 255, 0); glLineWidth(6); glBegin(GL_LINE_STRIP); glVertex2i( scaleX - display_pos[NUI_SKELETON_POSITION_HIP_CENTER][0], scaleY - display_pos[NUI_SKELETON_POSITION_HIP_CENTER][1]); glVertex2i( scaleX - display_pos[NUI_SKELETON_POSITION_SPINE][0], scaleY - display_pos[NUI_SKELETON_POSITION_SPINE][1]); glVertex2i( scaleX - display_pos[NUI_SKELETON_POSITION_SHOULDER_CENTER][0], scaleY - display_pos[NUI_SKELETON_POSITION_SHOULDER_CENTER][1]); glVertex2i( scaleX - display_pos[NUI_SKELETON_POSITION_HEAD][0], scaleY - display_pos[NUI_SKELETON_POSITION_HEAD][1]); glEnd(); glBegin(GL_LINE_STRIP); glVertex2i( scaleX - display_pos[NUI_SKELETON_POSITION_SHOULDER_CENTER][0], scaleY - display_pos[NUI_SKELETON_POSITION_SHOULDER_CENTER][1]); glVertex2i( scaleX - display_pos[NUI_SKELETON_POSITION_SHOULDER_LEFT][0], scaleY - display_pos[NUI_SKELETON_POSITION_SHOULDER_LEFT][1]); glVertex2i( scaleX - display_pos[NUI_SKELETON_POSITION_ELBOW_LEFT][0], scaleY - display_pos[NUI_SKELETON_POSITION_ELBOW_LEFT][1]); glVertex2i( scaleX - display_pos[NUI_SKELETON_POSITION_WRIST_LEFT][0], scaleY - display_pos[NUI_SKELETON_POSITION_WRIST_LEFT][1]); glVertex2i( scaleX - display_pos[NUI_SKELETON_POSITION_HAND_LEFT][0], scaleY - 
display_pos[NUI_SKELETON_POSITION_HAND_LEFT][1]); glEnd(); glBegin(GL_LINE_STRIP); glVertex2i( scaleX - display_pos[NUI_SKELETON_POSITION_SHOULDER_CENTER][0], scaleY - display_pos[NUI_SKELETON_POSITION_SHOULDER_CENTER][1]); glVertex2i( scaleX - display_pos[NUI_SKELETON_POSITION_SHOULDER_RIGHT][0], scaleY - display_pos[NUI_SKELETON_POSITION_SHOULDER_RIGHT][1]); glVertex2i( scaleX - display_pos[NUI_SKELETON_POSITION_ELBOW_RIGHT][0], scaleY - display_pos[NUI_SKELETON_POSITION_ELBOW_RIGHT][1]); glVertex2i( scaleX - display_pos[NUI_SKELETON_POSITION_WRIST_RIGHT][0], scaleY - display_pos[NUI_SKELETON_POSITION_WRIST_RIGHT][1]); glVertex2i( scaleX - display_pos[NUI_SKELETON_POSITION_HAND_RIGHT][0], scaleY - display_pos[NUI_SKELETON_POSITION_HAND_RIGHT][1]); glEnd(); glBegin(GL_LINE_STRIP); glVertex2i( scaleX - display_pos[NUI_SKELETON_POSITION_HIP_CENTER][0], scaleY - display_pos[NUI_SKELETON_POSITION_HIP_CENTER][1]); glVertex2i( scaleX - display_pos[NUI_SKELETON_POSITION_HIP_LEFT][0], scaleY - display_pos[NUI_SKELETON_POSITION_HIP_LEFT][1]); glVertex2i( scaleX - display_pos[NUI_SKELETON_POSITION_KNEE_LEFT][0], scaleY - display_pos[NUI_SKELETON_POSITION_KNEE_LEFT][1]); glVertex2i( scaleX - display_pos[NUI_SKELETON_POSITION_ANKLE_LEFT][0], scaleY - display_pos[NUI_SKELETON_POSITION_ANKLE_LEFT][1]); glVertex2i( scaleX - display_pos[NUI_SKELETON_POSITION_FOOT_LEFT][0], scaleY - display_pos[NUI_SKELETON_POSITION_FOOT_LEFT][1]); glEnd(); glBegin(GL_LINE_STRIP); glVertex2i( scaleX - display_pos[NUI_SKELETON_POSITION_HIP_CENTER][0], scaleY - display_pos[NUI_SKELETON_POSITION_HIP_CENTER][1]); glVertex2i( scaleX - display_pos[NUI_SKELETON_POSITION_HIP_RIGHT][0], scaleY - display_pos[NUI_SKELETON_POSITION_HIP_RIGHT][1]); glVertex2i( scaleX - display_pos[NUI_SKELETON_POSITION_KNEE_RIGHT][0], scaleY - display_pos[NUI_SKELETON_POSITION_KNEE_RIGHT][1]); glVertex2i( scaleX - display_pos[NUI_SKELETON_POSITION_ANKLE_RIGHT][0], scaleY - display_pos[NUI_SKELETON_POSITION_ANKLE_RIGHT][1]); 
glVertex2i( scaleX - display_pos[NUI_SKELETON_POSITION_FOOT_RIGHT][0], scaleY - display_pos[NUI_SKELETON_POSITION_FOOT_RIGHT][1]); glEnd(); glColor3ub(0, 0, 0); }
/// <summary>
/// Estimate, for each of the 16 avatar joints in nuiIDs[], the radius of the
/// collision sphere around the joint, storing the result in sphereRadii[].
/// Starting at the joint's depth-image projection, the search radius grows
/// until the scan hits a non-zero pixel in uImage in one of the four axis
/// directions; that pixel is back-projected to skeleton space and the
/// distance to the joint taken as the radius.
/// </summary>
/// <param name="mNui">Controller holding joint positions (m_Points) and the
/// current depth resolution (m_DepthResolution).</param>
void getSphereSizes(NUI_Controller* mNui) {
    int x_init = 0;
    int y_init = 0;
    int step = 0;
    try {
        for (int i = 0; i < 16; i++) {
            NUI_SKELETON_POSITION_INDEX sJoint = nuiIDs[i];
            //If joint is processed before, do not repeat it.
            // FIX: the original stored the cached radius but fell through and
            // recomputed (then overwrote) it anyway; now we actually skip.
            bool reused = false;
            for (int j = 0; j < i; j++) {
                if (nuiIDs[j] == sJoint) {
                    sphereRadii[i] = sphereRadii[j];
                    reused = true;
                    break;
                }
            }
            if (reused) continue;
            NUI_Vector4 realPosition = mNui->m_Points[sJoint];
            Vector2 endOfJoint;
            LONG x, y;
            USHORT depth;
            NuiTransformSkeletonToDepthImage(realPosition, &x, &y, &depth, mNui->m_DepthResolution);
            x_init = x;
            y_init = y;
            step = 0;
            int radius = 0;
            //cvShowImage(windowName.c_str(),uImage);
            //cvSetMouseCallback(windowName.c_str(), mouseEvent, 0);
            //cvWaitKey();
            endOfJoint.x = x;
            endOfJoint.y = y; // FIX: was assigned x, corrupting every vertical-direction estimate
            UCHAR* iPtr = (UCHAR*)(uImage->imageData + y_init * uImage->widthStep + x_init);
            USHORT* dPtr = (USHORT*)(dImage->imageData + y_init * dImage->widthStep + x_init * 2); //Multiply x_init by 2, since dImage is 16 bits - 2 bytes
            //Slowly enlarge the joint sphere until it reaches the end of a bone in one direction.
            while (step < m_Width) {
                if (x_init - step > -1) {
                    UCHAR tValue = *(iPtr - step);
                    if (tValue != 0) {
                        endOfJoint.x -= (step - 1);
                        NUI_Vector4 trueEnd = NuiTransformDepthImageToSkeleton(endOfJoint.x, endOfJoint.y, *(dPtr - (step - 1)), mNui->m_DepthResolution);
                        // NOTE(review): abs() on float operands relies on a float
                        // overload being in scope — verify includes or use fabs().
                        radius = abs(realPosition.x - trueEnd.x);
                        break;
                    }
                }
                if (x_init + step < m_Width) {
                    UCHAR tValue = *(iPtr + step);
                    if (tValue != 0) {
                        endOfJoint.x += (step - 1);
                        NUI_Vector4 trueEnd = NuiTransformDepthImageToSkeleton(endOfJoint.x, endOfJoint.y, *(dPtr + (step - 1)), mNui->m_DepthResolution);
                        radius = abs(realPosition.x - trueEnd.x);
                        break;
                    }
                }
                if (step < m_Height) {
                    if (y_init - step > -1) {
                        UCHAR tValue = *(iPtr - step * uImage->widthStep);
                        if (tValue != 0) {
                            endOfJoint.y -= (step - 1);
                            NUI_Vector4 trueEnd = NuiTransformDepthImageToSkeleton(endOfJoint.x, endOfJoint.y, *(dPtr - (step - 1) * dImage->widthStep / 2), mNui->m_DepthResolution);
                            radius = abs(realPosition.y - trueEnd.y);
                            break;
                        }
                    }
                    if (y_init + step < m_Height) {
                        UCHAR tValue = *(iPtr + step * uImage->widthStep);
                        if (tValue != 0) {
                            endOfJoint.y += (step - 1);
                            NUI_Vector4 trueEnd = NuiTransformDepthImageToSkeleton(endOfJoint.x, endOfJoint.y, *(dPtr + (step - 1) * dImage->widthStep / 2), mNui->m_DepthResolution);
                            radius = abs(realPosition.y - trueEnd.y);
                            break;
                        }
                    }
                }
                step++;
            }
            sphereRadii[i] = radius;
        }
    } catch (Ogre::Exception& e) {
        MessageBox(NULL, e.getFullDescription().c_str(), "An exception has occured!", MB_OK | MB_ICONERROR | MB_TASKMODAL);
    } catch (const cv::Exception& e) { // FIX: catch by reference, not by value
        MessageBox(NULL, e.err.c_str(), "An exception has occured!", MB_OK | MB_ICONERROR | MB_TASKMODAL);
    }
}