int main(int argc, char** argv) { nite::UserTracker userTracker; nite::Status niteRc; nite::NiTE::initialize(); niteRc = userTracker.create(); if (niteRc != nite::STATUS_OK) { printf("Couldn't create user tracker\n"); return 3; } printf("\nStart moving around to get detected...\n(PSI pose may be required for skeleton calibration, depending on the configuration)\n"); nite::UserTrackerFrameRef userTrackerFrame; while (!wasKeyboardHit()) { niteRc = userTracker.readFrame(&userTrackerFrame); if (niteRc != nite::STATUS_OK) { printf("Get next frame failed\n"); continue; } const nite::Array<nite::UserData>& users = userTrackerFrame.getUsers(); for (int i = 0; i < users.getSize(); ++i) { const nite::UserData& user = users[i]; updateUserState(user,userTrackerFrame.getTimestamp()); if (user.isNew()) { userTracker.startSkeletonTracking(user.getId()); } else if (user.getSkeleton().getState() == nite::SKELETON_TRACKED) { const nite::SkeletonJoint& head = user.getSkeleton().getJoint(nite::JOINT_HEAD); if (head.getPositionConfidence() > .5) printf("%d. (%5.2f, %5.2f, %5.2f)\n", user.getId(), head.getPosition().x, head.getPosition().y, head.getPosition().z); } } } nite::NiTE::shutdown(); }
// GLUT display callback for the teleoperation viewer.
// Per frame it: (1) reads a user-tracker frame and renders the depth map as a
// user-colored texture, (2) tracks only the FIRST detected user, (3) converts
// that user's shoulder/elbow joint vectors into robot yaw/pitch angles and
// publishes them via actionPublish() when the frame-to-frame change stays
// under moveLimitDegree, and (4) handles the crossed-hands exit pose.
void SampleViewer::Display()
{
	// namespace bg = boost::geometry;
	nite::UserTrackerFrameRef userTrackerFrame;
	openni::VideoFrameRef depthFrame;
	nite::Status rc = m_pUserTracker->readFrame(&userTrackerFrame);
	if (rc != nite::STATUS_OK)
	{
		printf("GetNextData failed\n");
		return;
	}

	depthFrame = userTrackerFrame.getDepthFrame();

	if (m_pTexMap == NULL)
	{
		// Texture map init
		// Lazily allocate the texture, rounded up to TEXTURE_SIZE-sized chunks.
		m_nTexMapX = MIN_CHUNKS_SIZE(depthFrame.getVideoMode().getResolutionX(), TEXTURE_SIZE);
		m_nTexMapY = MIN_CHUNKS_SIZE(depthFrame.getVideoMode().getResolutionY(), TEXTURE_SIZE);
		m_pTexMap = new openni::RGB888Pixel[m_nTexMapX * m_nTexMapY];
	}

	const nite::UserMap& userLabels = userTrackerFrame.getUserMap();

	// 2D orthographic projection, origin at top-left of the window.
	glClear (GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
	glMatrixMode(GL_PROJECTION);
	glPushMatrix();
	glLoadIdentity();
	glOrtho(0, GL_WIN_SIZE_X, GL_WIN_SIZE_Y, 0, -1.0, 1.0);

	if (depthFrame.isValid() && g_drawDepth)
	{
		calculateHistogram(m_pDepthHist, MAX_DEPTH, depthFrame);
	}

	memset(m_pTexMap, 0, m_nTexMapX*m_nTexMapY*sizeof(openni::RGB888Pixel));

	float factor[3] = {1, 1, 1};
	// check if we need to draw depth frame to texture
	if (depthFrame.isValid() && g_drawDepth)
	{
		const nite::UserId* pLabels = userLabels.getPixels();
		const openni::DepthPixel* pDepthRow = (const openni::DepthPixel*)depthFrame.getData();
		// Honor the frame's crop origin when placing pixels into the texture.
		openni::RGB888Pixel* pTexRow = m_pTexMap + depthFrame.getCropOriginY() * m_nTexMapX;
		int rowSize = depthFrame.getStrideInBytes() / sizeof(openni::DepthPixel);

		for (int y = 0; y < depthFrame.getHeight(); ++y)
		{
			const openni::DepthPixel* pDepth = pDepthRow;
			openni::RGB888Pixel* pTex = pTexRow + depthFrame.getCropOriginX();

			for (int x = 0; x < depthFrame.getWidth(); ++x, ++pDepth, ++pTex, ++pLabels)
			{
				if (*pDepth != 0)
				{
					if (*pLabels == 0)
					{
						// Background pixel: black it out or use the shared background color.
						if (!g_drawBackground)
						{
							factor[0] = factor[1] = factor[2] = 0;
						}
						else
						{
							factor[0] = Colors[colorCount][0];
							factor[1] = Colors[colorCount][1];
							factor[2] = Colors[colorCount][2];
						}
					}
					else
					{
						// User pixel: tint by user id so each user keeps a stable color.
						factor[0] = Colors[*pLabels % colorCount][0];
						factor[1] = Colors[*pLabels % colorCount][1];
						factor[2] = Colors[*pLabels % colorCount][2];
					}
//					// Add debug lines - every 10cm
//					else if ((*pDepth / 10) % 10 == 0)
//					{
//						factor[0] = factor[2] = 0;
//					}

					// Histogram-equalized brightness for depth contrast.
					int nHistValue = m_pDepthHist[*pDepth];
					pTex->r = nHistValue*factor[0];
					pTex->g = nHistValue*factor[1];
					pTex->b = nHistValue*factor[2];

					factor[0] = factor[1] = factor[2] = 1;
				}
			}

			pDepthRow += rowSize;
			pTexRow += m_nTexMapX;
		}
	}

	// Upload the texture and draw it as one window-filling quad.
	glTexParameteri(GL_TEXTURE_2D, GL_GENERATE_MIPMAP_SGIS, GL_TRUE);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
	glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, m_nTexMapX, m_nTexMapY, 0, GL_RGB, GL_UNSIGNED_BYTE, m_pTexMap);

	// Display the OpenGL texture map
	glColor4f(1,1,1,1);
	glEnable(GL_TEXTURE_2D);
	glBegin(GL_QUADS);

	g_nXRes = depthFrame.getVideoMode().getResolutionX();
	g_nYRes = depthFrame.getVideoMode().getResolutionY();

	// upper left
	glTexCoord2f(0, 0);
	glVertex2f(0, 0);
	// upper right
	glTexCoord2f((float)g_nXRes/(float)m_nTexMapX, 0);
	glVertex2f(GL_WIN_SIZE_X, 0);
	// bottom right
	glTexCoord2f((float)g_nXRes/(float)m_nTexMapX, (float)g_nYRes/(float)m_nTexMapY);
	glVertex2f(GL_WIN_SIZE_X, GL_WIN_SIZE_Y);
	// bottom left
	glTexCoord2f(0, (float)g_nYRes/(float)m_nTexMapY);
	glVertex2f(0, GL_WIN_SIZE_Y);

	glEnd();
	glDisable(GL_TEXTURE_2D);

	const nite::Array<nite::UserData>& users = userTrackerFrame.getUsers();
	// 11.16.15 remove for loop for tracking only one user
	// for (int i = 0; i < users.getSize(); ++i)
	// {
	//const nite::UserData& user = users[i];
	if (users.getSize() > 0)
	{
		// Only the first reported user drives the robot.
		const nite::UserData& user = users[0];
		updateUserState(user, userTrackerFrame.getTimestamp());
		if (user.isNew())
		{
			m_pUserTracker->startSkeletonTracking(user.getId());
			m_pUserTracker->startPoseDetection(user.getId(), nite::POSE_CROSSED_HANDS);
		}
		else if (!user.isLost())
		{
			// Optional per-user overlays.
			if (g_drawStatusLabel)
			{
				DrawStatusLabel(m_pUserTracker, user);
			}
			if (g_drawCenterOfMass)
			{
				DrawCenterOfMass(m_pUserTracker, user);
			}
			if (g_drawBoundingBox)
			{
				DrawBoundingBox(user);
			}
			if (user.getSkeleton().getState() == nite::SKELETON_TRACKED && g_drawSkeleton)
			{
				DrawSkeleton(m_pUserTracker, user);
			}
		}

		// Crossed-hands pose held for g_poseTimeoutToExit ms exits the app.
		if (m_poseUser == 0 || m_poseUser == user.getId())
		{
			const nite::PoseData& pose = user.getPose(nite::POSE_CROSSED_HANDS);

			if (pose.isEntered())
			{
				// Start timer
				sprintf(g_generalMessage, "In exit pose. Keep it for %d second%s to exit\n", g_poseTimeoutToExit/1000, g_poseTimeoutToExit/1000 == 1 ? "" : "s");
				printf("Counting down %d second to exit\n", g_poseTimeoutToExit/1000);
				m_poseUser = user.getId();
				m_poseTime = userTrackerFrame.getTimestamp();
			}
			else if (pose.isExited())
			{
				memset(g_generalMessage, 0, sizeof(g_generalMessage));
				printf("Count-down interrupted\n");
				m_poseTime = 0;
				m_poseUser = 0;
			}
			else if (pose.isHeld())
			{
				// tick
				// Timestamps are compared in microseconds (timeout is in ms).
				if (userTrackerFrame.getTimestamp() - m_poseTime > g_poseTimeoutToExit * 1000)
				{
					printf("Count down complete. Exit...\n");
					Finalize();
					exit(2);
				}
			}
		}

		//user
		// NOTE(review): buffer is only 80 bytes and several sprintf formats
		// below approach/exceed that with large values — consider snprintf.
		char buffer[80] = "";

		// ---------------- calculate right shoulder 2 DOF ----------------------
		// Vector from right shoulder to right elbow (truncated to int mm),
		// converted to spherical angles, then offset by the calibration inits.
		int rightShoulderX, rightShoulderY, rightShoulderZ;
		rightShoulderX = user.getSkeleton().getJoint(nite::JOINT_RIGHT_ELBOW).getPosition().x - user.getSkeleton().getJoint(nite::JOINT_RIGHT_SHOULDER).getPosition().x;
		rightShoulderY = user.getSkeleton().getJoint(nite::JOINT_RIGHT_ELBOW).getPosition().y - user.getSkeleton().getJoint(nite::JOINT_RIGHT_SHOULDER).getPosition().y;
		rightShoulderZ = user.getSkeleton().getJoint(nite::JOINT_RIGHT_ELBOW).getPosition().z - user.getSkeleton().getJoint(nite::JOINT_RIGHT_SHOULDER).getPosition().z;
		Spherical rightShoulderSpherical;
		rightShoulderSpherical = Cartesian2Spherical(rightShoulderX, rightShoulderY, rightShoulderZ);
		int rightShoulderPhi, rightShoulderTheta;
		rightShoulderTheta = radian2Degree(rightShoulderSpherical.radianTheta, rightShoulderThetaInit); // horizontal rise it's -60 degree
		rightShoulderPhi = radian2Degree(rightShoulderSpherical.radianPhi, rightShoulderPitchInit); // when hand's down, it's 70 degree
		// Project the elevation angle onto robot yaw/pitch axes.
		int rightShoulderYawNow = rightShoulderPhi * sin(rightShoulderTheta * PI / 180);
		int rightShoulderPitchNow = rightShoulderPhi * cos(rightShoulderTheta * PI / 180);
		sprintf(buffer,"(rightShoulderTheta=%d, rightShoulderYaw=%d, rightShoulderPitch=%d)", rightShoulderTheta, rightShoulderYawNow, rightShoulderPitchNow);
		glColor3f(1.0f, 0.0f, 0.0f);
		glRasterPos2i(20, 20);
		glPrintString(GLUT_BITMAP_HELVETICA_18, buffer);

		// ------------------- calculate right elbow 2 DOF -----------------------
		// Vector from right elbow to right hand.
		int rightElbowX, rightElbowY, rightElbowZ;
		rightElbowX = user.getSkeleton().getJoint(nite::JOINT_RIGHT_HAND).getPosition().x - user.getSkeleton().getJoint(nite::JOINT_RIGHT_ELBOW).getPosition().x;
		rightElbowY = user.getSkeleton().getJoint(nite::JOINT_RIGHT_HAND).getPosition().y - user.getSkeleton().getJoint(nite::JOINT_RIGHT_ELBOW).getPosition().y;
		rightElbowZ = user.getSkeleton().getJoint(nite::JOINT_RIGHT_HAND).getPosition().z - user.getSkeleton().getJoint(nite::JOINT_RIGHT_ELBOW).getPosition().z;
		Spherical rightElbowSpherical;
		rightElbowSpherical = Cartesian2Spherical(rightElbowX, rightElbowY, rightElbowZ);
		int rightElbowThetaNow = - radian2Degree(rightElbowSpherical.radianTheta, rightElbowThetaInit); // reverse for system
		int rightElbowYawNow = radian2Degree(rightElbowSpherical.radianPhi, rightElbowYawInit);
		sprintf(buffer,"(rightElbowThetaNow=%d, rightElbowYawNow=%d)", rightElbowThetaNow, rightElbowYawNow);
		glColor3f(1.0f, 0.0f, 0.0f);
		glRasterPos2i(20, 60);
		glPrintString(GLUT_BITMAP_HELVETICA_18, buffer);

		// ---------------- calculate left shoulder 2 DOF ----------------------
		// Same construction as the right shoulder, mirrored.
		int leftShoulderX, leftShoulderY, leftShoulderZ;
		leftShoulderX = user.getSkeleton().getJoint(nite::JOINT_LEFT_ELBOW).getPosition().x - user.getSkeleton().getJoint(nite::JOINT_LEFT_SHOULDER).getPosition().x;
		leftShoulderY = user.getSkeleton().getJoint(nite::JOINT_LEFT_ELBOW).getPosition().y - user.getSkeleton().getJoint(nite::JOINT_LEFT_SHOULDER).getPosition().y;
		leftShoulderZ = user.getSkeleton().getJoint(nite::JOINT_LEFT_ELBOW).getPosition().z - user.getSkeleton().getJoint(nite::JOINT_LEFT_SHOULDER).getPosition().z;
		Spherical leftShoulderSpherical;
		leftShoulderSpherical = Cartesian2Spherical(leftShoulderX, leftShoulderY, leftShoulderZ);
		int leftShoulderPhi, leftShoulderTheta;
		leftShoulderTheta = radian2Degree(leftShoulderSpherical.radianTheta, leftShoulderThetaInit); // horizontal rise it's -60 degree
		leftShoulderPhi = radian2Degree(leftShoulderSpherical.radianPhi, leftShoulderPitchInit); // when hand's down, it's 70 degree
		// need to reverse in left side
		int leftShoulderYawNow = - leftShoulderPhi * sin(leftShoulderTheta * PI / 180);
		int leftShoulderPitchNow = - leftShoulderPhi * cos(leftShoulderTheta * PI / 180);
		sprintf(buffer,"(leftShoulderTheta=%d, leftShoulderYaw=%d, leftShoulderPitch=%d)", leftShoulderTheta, leftShoulderYawNow, leftShoulderPitchNow);
		glColor3f(1.0f, 0.0f, 0.0f);
		glRasterPos2i(20, 100);
		glPrintString(GLUT_BITMAP_HELVETICA_18, buffer);

		// ------------------- calculate left elbow 2 DOF -----------------------
		int leftElbowX, leftElbowY, leftElbowZ;
		leftElbowX = user.getSkeleton().getJoint(nite::JOINT_LEFT_HAND).getPosition().x - user.getSkeleton().getJoint(nite::JOINT_LEFT_ELBOW).getPosition().x;
		leftElbowY = user.getSkeleton().getJoint(nite::JOINT_LEFT_HAND).getPosition().y - user.getSkeleton().getJoint(nite::JOINT_LEFT_ELBOW).getPosition().y;
		leftElbowZ = user.getSkeleton().getJoint(nite::JOINT_LEFT_HAND).getPosition().z - user.getSkeleton().getJoint(nite::JOINT_LEFT_ELBOW).getPosition().z;
		Spherical leftElbowSpherical;
		leftElbowSpherical = Cartesian2Spherical(leftElbowX, leftElbowY, leftElbowZ);
		int leftElbowThetaNow = radian2Degree(leftElbowSpherical.radianTheta, leftElbowThetaInit);
		int leftElbowYawNow = radian2Degree(leftElbowSpherical.radianPhi, leftElbowYawInit);
		sprintf(buffer,"(leftElbowTheta=%d, leftElbowYawNow=%d)", leftElbowThetaNow, leftElbowYawNow);
		glColor3f(1.0f, 0.0f, 0.0f);
		glRasterPos2i(20, 140);
		glPrintString(GLUT_BITMAP_HELVETICA_18, buffer);

		// ---------------- constraint movement and publish message ------------
		// Reject jumps: only accept the new angles when every delta from the
		// previous accepted pose is below moveLimitDegree.
		int rightShoulderYawDiff = abs(rightShoulderYaw - rightShoulderYawNow);
		int rightShoulderPitchDiff = abs(rightShoulderPitch - rightShoulderPitchNow);
		int rightElbowThetaDiff = abs(rightElbowTheta - rightElbowThetaNow);
		int rightElbowYawDiff = abs(rightElbowYaw - rightElbowYawNow);
		int leftShoulderYawDiff = abs(leftShoulderYaw - leftShoulderYawNow);
		int leftShoulderPitchDiff = abs(leftShoulderPitch - leftShoulderPitchNow);
		int leftElbowThetaDiff = abs(leftElbowTheta - leftElbowThetaNow);
		int leftElbowYawDiff = abs(leftElbowYaw - leftElbowYawNow);

		if ((rightShoulderYawDiff < moveLimitDegree) && (rightShoulderPitchDiff < moveLimitDegree) && (rightElbowThetaDiff < moveLimitDegree) && (rightElbowYawDiff < moveLimitDegree) && (rightShoulderTheta >= 0))
		{
			// in range then refresh robot angle
			rightShoulderYaw = rightShoulderYawNow;
			rightShoulderPitch = rightShoulderPitchNow;
			rightElbowTheta = rightElbowThetaNow;
			rightElbowYaw = rightElbowYawNow;
			// change the angle to 0 to 360 and publish
			rightShoulderYawPub = angleHandler(rightShoulderYaw);
			rightShoulderPitchPub = angleHandler(rightShoulderPitch);
			rightElbowThetaPub = angleHandler(rightElbowTheta);
			rightElbowYawPub = angleHandler(rightElbowYaw);
			sprintf(buffer,"tracking!");
			glColor3f(0.0f, 0.0f, 1.0f);
			glRasterPos2i(20, 180);
			glPrintString(GLUT_BITMAP_HELVETICA_18, buffer);
			actionPublish(rightShoulderYawPub, rightShoulderPitchPub, rightElbowThetaPub, rightElbowYawPub, leftShoulderYawPub, leftShoulderPitchPub, leftElbowThetaPub, leftElbowYawPub);
		}
		else
		{
			// Out of range: show the raw deltas instead of publishing.
			// NOTE(review): "soulder" is a typo in the on-screen text.
			sprintf(buffer,"soulder yaw: %d, soulder pitch: %d, elbow yaw: %d, rightShoulderTheta > 0: %d", rightShoulderYaw - rightShoulderYawNow, rightShoulderPitch - rightShoulderPitchNow, rightElbowYaw - rightElbowYawNow, rightShoulderTheta);
			glColor3f(1.0f, 0.0f, 0.5f);
			glRasterPos2i(20, 180);
			glPrintString(GLUT_BITMAP_HELVETICA_18, buffer);
		}

		// Same gating for the left arm; publishes the full 8-angle set again.
		if((leftShoulderYawDiff < moveLimitDegree) && (leftShoulderPitchDiff < moveLimitDegree) && (leftElbowThetaDiff < moveLimitDegree) && (leftElbowYawDiff < moveLimitDegree) && (leftShoulderTheta >= 0))
		{
			leftShoulderYaw = leftShoulderYawNow;
			leftShoulderPitch = leftShoulderPitchNow;
			leftElbowTheta = leftElbowThetaNow;
			leftElbowYaw = leftElbowYawNow;
			leftShoulderYawPub = angleHandler(leftShoulderYaw);
			leftShoulderPitchPub = angleHandler(leftShoulderPitch);
			leftElbowThetaPub = angleHandler(leftElbowTheta);
			leftElbowYawPub = angleHandler(leftElbowYaw);
			sprintf(buffer,"tracking!");
			glColor3f(0.0f, 0.3f, 1.0f);
			glRasterPos2i(20, 220);
			glPrintString(GLUT_BITMAP_HELVETICA_18, buffer);
			actionPublish(rightShoulderYawPub, rightShoulderPitchPub, rightElbowThetaPub, rightElbowYawPub, leftShoulderYawPub, leftShoulderPitchPub, leftElbowThetaPub, leftElbowYawPub);
		}
		else
		{
			sprintf(buffer,"soulder yaw: %d, soulder pitch: %d, elbow yaw: %d, leftShoulderTheta > 0: %d", leftShoulderYaw - leftShoulderYawNow, leftShoulderPitch - leftShoulderPitchNow, leftElbowYaw - leftElbowYawNow, leftShoulderTheta);
			// NOTE(review): 2.0f blue is outside [0,1]; GL clamps it to 1.0.
			glColor3f(0.7f, 0.8f, 2.0f);
			glRasterPos2i(20, 220);
			glPrintString(GLUT_BITMAP_HELVETICA_18, buffer);
		}
	}

	if (g_drawFrameId)
	{
		DrawFrameId(userTrackerFrame.getFrameIndex());
	}

	if (g_generalMessage[0] != '\0')
	{
		char *msg = g_generalMessage;
		glColor3f(1.0f, 0.0f, 0.0f);
		glRasterPos2i(100, 20);
		glPrintString(GLUT_BITMAP_HELVETICA_18, msg);
	}

	// Swap the OpenGL display buffers
	glutSwapBuffers();
}
int main(int argc, char** argv) { cv::Mat bw; //= cv::Mat(cv::Size(320, 240), CV_8UC3, cv::Scalar(255, 255, 0)); openni::Device device; openni::VideoStream color; openni::VideoFrameRef colorFrame; openni::Status rc = openni::STATUS_OK; rc = openni::OpenNI::initialize(); rc = device.open(openni::ANY_DEVICE); rc = color.create(device, openni::SENSOR_COLOR); rc = color.start(); nite::UserTracker userTracker; nite::Status niteRc; nite::NiTE::initialize(); niteRc = userTracker.create(); if (niteRc != nite::STATUS_OK) { printf("Couldn't create user tracker\n"); cv::waitKey(0); return 3; } printf("\nStart moving around to get detected...\n(PSI pose may be required for skeleton calibration, depending on the configuration)\n"); nite::UserTrackerFrameRef userTrackerFrame; float min =10000.0; float max = -10000; cv::namedWindow("bw",1); Status status = STATUS_ERROR; while (true) { color.readFrame(&colorFrame); const openni::RGB888Pixel* imageBuffer = (const openni::RGB888Pixel*)colorFrame.getData(); bw.create(colorFrame.getHeight(),colorFrame.getWidth(),CV_8UC3); memcpy(bw.data,imageBuffer,3*colorFrame.getHeight()*colorFrame.getWidth()*sizeof(uint8_t)); niteRc = userTracker.readFrame(&userTrackerFrame); if (niteRc != nite::STATUS_OK) { printf("Get next frame failed\n"); continue; } const nite::Array<nite::UserData>& users = userTrackerFrame.getUsers(); for (int i = 0; i < users.getSize(); ++i) { const nite::UserData& user = users[i]; updateUserState(user,userTrackerFrame.getTimestamp()); if (user.isNew()) { userTracker.startSkeletonTracking(user.getId()); } else if (user.getSkeleton().getState() == nite::SKELETON_TRACKED) { const nite::SkeletonJoint& head = user.getSkeleton().getJoint(nite::JOINT_HEAD); if (head.getPositionConfidence() > .5){ float xer = head.getPosition().x; if (xer < min) min = xer; else if(xer>max) max = xer; printf("%d. 
x=(%5.2f, y= %5.2f, z= %5.2f)\n", user.getId(), head.getPosition().x, head.getPosition().y, head.getPosition().z); float newX, newY; status = userTrackerFrame.convertJointCoordinatesToDepth(head.getPosition().x, head.getPosition().y, head.getPosition().z, &newX, &newY) | STATUS_OK; printf("%5.2f %5.2f)\n", newX, newY); cv::circle(bw, newX, newY)), 20, cv::Scalar(255,255,255), -1, 8, 0); } } } cv::imshow("bw", bw); int key = cv::waitKey(10); if (key==27 || status == STATUS_ERROR) break; }
// GLUT display callback for the dual-camera (side tracker + top depth stream)
// capture tool. Renders the side depth map via OpenGL, builds grayscale /
// background-subtracted OpenCV views of the top depth map, overlays the
// largest visible user's skeleton in both views, and — when capture is armed —
// dumps per-frame depth, joint, and label data to text files in outDir.
void SampleViewer::Display()
{
	if (g_pause)
		return;

	nite::UserTrackerFrameRef userTrackerFrame;
	nite::Status rc1 = m_pUserTracker->readFrame(&userTrackerFrame);
	if (rc1 != nite::STATUS_OK)
	{
		printf("GetNextData failed\n");
		return;
	}

	openni::VideoFrameRef depthFrameSide = userTrackerFrame.getDepthFrame();
	int height = depthFrameSide.getHeight();
	int width = depthFrameSide.getWidth();
	if (!label)
	{
		// One-time allocation of the per-pixel label buffer, reused each frame.
		// NOTE(review): raw malloc, never freed — acceptable for app lifetime,
		// but result is unchecked.
		label = (int *)malloc(width*height*sizeof(int));
	}

	// Second depth stream from the top-mounted camera.
	openni::VideoFrameRef depthFrameTop;
	openni::Status rc2 = depthStreamTop.readFrame(&depthFrameTop);
	if (rc2 != openni::STATUS_OK)
	{
		printf("GetNextData failed\n");
		return;
	}

	if (m_pTexMap == NULL)
	{
		// Texture map init
		m_nTexMapX = MIN_CHUNKS_SIZE(depthFrameSide.getVideoMode().getResolutionX(), TEXTURE_SIZE);
		m_nTexMapY = MIN_CHUNKS_SIZE(depthFrameSide.getVideoMode().getResolutionY(), TEXTURE_SIZE);
		m_pTexMap = new openni::RGB888Pixel[m_nTexMapX * m_nTexMapY];
	}

	const nite::UserMap& userLabels = userTrackerFrame.getUserMap();

	// 2D orthographic projection, origin at top-left of the window.
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
	glMatrixMode(GL_PROJECTION);
	glPushMatrix();
	glLoadIdentity();
	glOrtho(0, GL_WIN_SIZE_X, GL_WIN_SIZE_Y, 0, -1.0, 1.0);

	if (depthFrameSide.isValid() && g_drawDepth)
	{
		calculateHistogram(m_pDepthHistSide, MAX_DEPTH, depthFrameSide);
	}

	memset(m_pTexMap, 0, m_nTexMapX*m_nTexMapY*sizeof(openni::RGB888Pixel));

	float factor[3] = {1, 1, 1};
	// check if we need to draw depth frame to texture
	if (depthFrameSide.isValid() && g_drawDepth)
	{
		const nite::UserId* pLabels = userLabels.getPixels();
		const openni::DepthPixel* pDepthRow = (const openni::DepthPixel*)depthFrameSide.getData();
		openni::RGB888Pixel* pTexRow = m_pTexMap + depthFrameSide.getCropOriginY() * m_nTexMapX;
		int rowSize = depthFrameSide.getStrideInBytes() / sizeof(openni::DepthPixel);

		for (int y = 0; y < height; ++y)
		{
			const openni::DepthPixel* pDepth = pDepthRow;
			openni::RGB888Pixel* pTex = pTexRow + depthFrameSide.getCropOriginX();

			for (int x = 0; x < width; ++x, ++pDepth, ++pTex, ++pLabels)
			{
				if (*pDepth != 0)
				{
					if (*pLabels == 0)
					{
						// Background pixel: black out or shared background color.
						if (!g_drawBackground)
						{
							factor[0] = factor[1] = factor[2] = 0;
						}
						else
						{
							factor[0] = Colors[colorCount][0];
							factor[1] = Colors[colorCount][1];
							factor[2] = Colors[colorCount][2];
						}
					}
					else
					{
						// User pixel: stable per-user tint.
						factor[0] = Colors[*pLabels % colorCount][0];
						factor[1] = Colors[*pLabels % colorCount][1];
						factor[2] = Colors[*pLabels % colorCount][2];
					}
//					// Add debug lines - every 10cm
//					else if ((*pDepth / 10) % 10 == 0)
//					{
//						factor[0] = factor[2] = 0;
//					}

					// Histogram-equalized brightness for depth contrast.
					int nHistValue = m_pDepthHistSide[*pDepth];
					pTex->r = nHistValue*factor[0];
					pTex->g = nHistValue*factor[1];
					pTex->b = nHistValue*factor[2];

					factor[0] = factor[1] = factor[2] = 1;
				}
			}

			pDepthRow += rowSize;
			pTexRow += m_nTexMapX;
		}
	}

	// Build OpenCV views of the top depth map: depthTop (8-bit raw scale)
	// feeds the background subtractor, imgTop (3-channel grayscale via the
	// top histogram) is the display image.
	const openni::DepthPixel *imgBufferSide = (const openni::DepthPixel *)depthFrameSide.getData();
	const openni::DepthPixel *imgBufferTop = (const openni::DepthPixel *)depthFrameTop.getData();
	calculateHistogram(m_pDepthHistTop, MAX_DEPTH, depthFrameTop);
	imgTop = Mat(depthFrameTop.getHeight(), depthFrameTop.getWidth(), CV_8UC3);
	Mat(depthFrameTop.getHeight(), depthFrameTop.getWidth(), CV_16U, (void *)imgBufferTop).convertTo(depthTop, CV_8U, 1.0/256);
	for (int i = 0; i < imgTop.rows; i++)
	{
		for (int j = 0; j < imgTop.cols; j++)
		{
			int val = (int)m_pDepthHistTop[imgBufferTop[j + i*imgTop.cols]];
			imgTop.at<Vec3b>(i, j).val[0] = val;
			imgTop.at<Vec3b>(i, j).val[1] = val;
			imgTop.at<Vec3b>(i, j).val[2] = val;
		}
	}

	// While g_getBackground is set we are still accumulating the background
	// model; afterwards only the foreground mask is computed.
	if (g_getBackground)
		bgSubtractor->processImages(depthTop);
	bgSubtractor->getMask(depthTop, mask);
	imshow("Mask", mask);

	// Upload the side-depth texture and draw it as one window-filling quad.
	glTexParameteri(GL_TEXTURE_2D, GL_GENERATE_MIPMAP_SGIS, GL_TRUE);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
	glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, m_nTexMapX, m_nTexMapY, 0, GL_RGB, GL_UNSIGNED_BYTE, m_pTexMap);

	// Display the OpenGL texture map
	glColor4f(1,1,1,1);
	glEnable(GL_TEXTURE_2D);
	glBegin(GL_QUADS);

	// 320x240
	g_nXRes = depthFrameSide.getVideoMode().getResolutionX();
	g_nYRes = depthFrameSide.getVideoMode().getResolutionY();

	// upper left
	glTexCoord2f(0, 0);
	glVertex2f(0, 0);
	// upper right
	glTexCoord2f((float)g_nXRes/(float)m_nTexMapX, 0);
	glVertex2f(GL_WIN_SIZE_X, 0);
	// bottom right
	glTexCoord2f((float)g_nXRes/(float)m_nTexMapX, (float)g_nYRes/(float)m_nTexMapY);
	glVertex2f(GL_WIN_SIZE_X, GL_WIN_SIZE_Y);
	// bottom left
	glTexCoord2f(0, (float)g_nYRes/(float)m_nTexMapY);
	glVertex2f(0, GL_WIN_SIZE_Y);

	glEnd();
	glDisable(GL_TEXTURE_2D);

	const nite::Array<nite::UserData>& users = userTrackerFrame.getUsers();

	// Pick the largest visible user; only that one gets its skeleton drawn.
	float maxSize = -1;
	int maxIdx = -1;
	for (int i = 0; i < users.getSize(); ++i)
	{
		const nite::UserData &user = users[i];
		if (!user.isVisible())
			continue;
		if (getSize(user) > maxSize)
		{
			maxSize = getSize(user);
			maxIdx = i;
		}
		//printf("user %d: size=%f\n, lost=%d, new=%d, visible=%d\n",
		//	i, getSize(user), user.isLost(), user.isNew(), user.isVisible());
	}

	for (int i = 0; i < users.getSize(); ++i)
	{
		const nite::UserData &user = users[i];
		updateUserState(user, userTrackerFrame.getTimestamp());
		if (user.isNew())
		{
			m_pUserTracker->startSkeletonTracking(user.getId());
			m_pUserTracker->startPoseDetection(user.getId(), nite::POSE_CROSSED_HANDS);
		}
		else if (!user.isLost())
		{
			if (g_drawStatusLabel)
			{
				DrawStatusLabel(m_pUserTracker, user);
			}
			if (g_drawCenterOfMass)
			{
				DrawCenterOfMass(m_pUserTracker, user);
			}
			if (g_drawBoundingBox)
			{
				DrawBoundingBox(user);
			}
			if (users[i].getSkeleton().getState() == nite::SKELETON_TRACKED && g_drawSkeleton)
			{
				if (maxIdx == i)
				{
					// GL overlay plus fresh OpenCV side/top skeleton views.
					DrawSkeleton(m_pUserTracker, user);
					sideSkel.setTo(Scalar(0, 0, 0));
					drawSkeleton(sideSkel, sideJoints);
					topSkel.setTo(Scalar(0, 0, 0));
					drawSkeleton(topSkel, topJoints);
					drawSkeleton(imgTop, topJoints);
				}
			}
		}

		// exit the program after a few seconds if PoseType == POSE_CROSSED_HANDS
		if (m_poseUser == 0 || m_poseUser == user.getId())
		{
			const nite::PoseData& pose = user.getPose(nite::POSE_CROSSED_HANDS);

			if (pose.isEntered())
			{
				// Start timer
				sprintf(g_generalMessage, "In exit pose. Keep it for %d second%s to exit\n", g_poseTimeoutToExit/1000, g_poseTimeoutToExit/1000 == 1 ? "" : "s");
				printf("Counting down %d second to exit\n", g_poseTimeoutToExit/1000);
				m_poseUser = user.getId();
				m_poseTime = userTrackerFrame.getTimestamp();
			}
			else if (pose.isExited())
			{
				memset(g_generalMessage, 0, sizeof(g_generalMessage));
				printf("Count-down interrupted\n");
				m_poseTime = 0;
				m_poseUser = 0;
			}
			else if (pose.isHeld())
			{
				// tick
				// Timestamps are compared in microseconds (timeout is in ms).
				if (userTrackerFrame.getTimestamp() - m_poseTime > g_poseTimeoutToExit * 1000)
				{
					printf("Count down complete. Exit...\n");
					Finalize();
					exit(2);
				}
			}
		}
	}

	if (g_drawFrameId)
	{
		DrawFrameId(userTrackerFrame.getFrameIndex());
	}

	if (g_generalMessage[0] != '\0')
	{
		char *msg = g_generalMessage;
		glColor3f(1.0f, 0.0f, 0.0f);
		glRasterPos2i(100, 20);
		glPrintString(GLUT_BITMAP_HELVETICA_18, msg);
	}

	imshow("Side", sideSkel);
	imshow("Top", topSkel);
	imshow("DepthTop", imgTop);

	// Once the background model is ready, label top-view pixels by nearest
	// joint and (optionally) dump the whole frame to text files.
	if (!g_getBackground)
	{
		knnsearch(topJoints, imgBufferTop, mask, labelTop, label, 320, 240);
		drawSkeleton(labelTop, topJoints);
		cv::resize(labelTop, labelTop, Size(), 2, 2);
		imshow("Label", labelTop);

		if (g_capture2)
		{
			// c style
			// NOTE(review): fopen results are unchecked; fprintf on a NULL
			// FILE* would crash if outDir is missing/unwritable.
			string path = outDir + "/depth-top" + to_string(nFrame) + ".txt";
			FILE *f = fopen(path.c_str(), "w");
			for (int i = 0; i < width*height; i++)
			{
				fprintf(f, "%u\n", imgBufferTop[i]);
			}
			fclose(f);

			path = outDir + "/depth-side" + to_string(nFrame) + ".txt";
			f = fopen(path.c_str(), "w");
			for (int i = 0; i < width*height; i++)
			{
				fprintf(f, "%u\n", imgBufferSide[i]);
			}
			fclose(f);

			path = outDir + "/joints-top" + to_string(nFrame) + ".txt";
			f = fopen(path.c_str(), "w");
			for (int i = 0; i < N_JOINTS; i++)
			{
				fprintf(f, "%f, %f, %f, %f, %f\n", topJoints[i][0], topJoints[i][1], topJoints[i][2], topJoints[i][3], topJoints[i][4]);
			}
			fclose(f);

			path = outDir + "/joints-side" + to_string(nFrame) + ".txt";
			f = fopen(path.c_str(), "w");
			for (int i = 0; i < N_JOINTS; i++)
			{
				fprintf(f, "%f, %f, %f, %f, %f\n", sideJoints[i][0], sideJoints[i][1], sideJoints[i][2], sideJoints[i][3], sideJoints[i][4]);
			}
			fclose(f);

			path = outDir + "/label-top" + to_string(nFrame) + ".txt";
			f = fopen(path.c_str(), "w");
			for (int i = 0; i < width*height; i++)
			{
				fprintf(f, "%d\n", label[i]);
			}
			fclose(f);

			path = outDir + "/label-side" + to_string(nFrame) + ".txt";
			f = fopen(path.c_str(), "w");
			const nite::UserId* labelsTop = userLabels.getPixels();
			for (int i = 0; i < width*height; i++)
			{
				fprintf(f, "%d\n", (int)labelsTop[i]);
			}
			fclose(f);

			nFrame++;
		}
	}

	// Swap the OpenGL display buffers
	glutSwapBuffers();
}
// GLUT display callback for the viewer with color/depth modes and extra
// skeleton props. Reads a tracker frame plus a color frame, renders either
// the color image or the user-colored depth map as a textured quad, then per
// user draws the optional overlays (status label, center of mass, bounding
// box, skeleton, hat, cube, cube-front) and handles the crossed-hands exit
// pose countdown.
void SampleViewer::Display()
{
	nite::UserTrackerFrameRef userTrackerFrame;
	openni::VideoFrameRef depthFrame;
	nite::Status rc = m_pUserTracker->readFrame(&userTrackerFrame);
	if (rc != nite::STATUS_OK)
	{
		printf("GetNextData failed\n");
		return;
	}

	depthFrame = userTrackerFrame.getDepthFrame();
	m_colorStream.readFrame(&m_colorFrame);

	if (m_pTexMap == NULL)
	{
		// Texture map init
		m_nTexMapX = MIN_CHUNKS_SIZE(depthFrame.getVideoMode().getResolutionX(), TEXTURE_SIZE);
		m_nTexMapY = MIN_CHUNKS_SIZE(depthFrame.getVideoMode().getResolutionY(), TEXTURE_SIZE);
		m_pTexMap = new openni::RGB888Pixel[m_nTexMapX * m_nTexMapY];
	}

	const nite::UserMap& userLabels = userTrackerFrame.getUserMap();

	// Orthographic projection with a deep z range so the 3D props (hat/cube)
	// are not clipped.
	glClear (GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
	glMatrixMode(GL_PROJECTION);
	glPushMatrix();
	glLoadIdentity();
	glOrtho(0, GL_WIN_SIZE_X, GL_WIN_SIZE_Y, 0, -2000.0, 2000.0);

	if (depthFrame.isValid() && g_drawDepth)
	{
		calculateHistogram(m_pDepthHist, MAX_DEPTH, depthFrame);
	}

	memset(m_pTexMap, 0, m_nTexMapX*m_nTexMapY*sizeof(openni::RGB888Pixel));

	// check if we need to draw image frame to texture
	if (m_eViewState == DISPLAY_MODE_IMAGE && m_colorFrame.isValid())
	{
		// Straight row-by-row copy of the RGB color frame into the texture.
		const openni::RGB888Pixel* pImageRow = (const openni::RGB888Pixel*)m_colorFrame.getData();
		openni::RGB888Pixel* pTexRow = m_pTexMap + m_colorFrame.getCropOriginY() * m_nTexMapX;
		int rowSize = m_colorFrame.getStrideInBytes() / sizeof(openni::RGB888Pixel);

		for (int y = 0; y < m_colorFrame.getHeight(); ++y)
		{
			const openni::RGB888Pixel* pImage = pImageRow;
			openni::RGB888Pixel* pTex = pTexRow + m_colorFrame.getCropOriginX();

			for (int x = 0; x < m_colorFrame.getWidth(); ++x, ++pImage, ++pTex)
			{
				*pTex = *pImage;
			}

			pImageRow += rowSize;
			pTexRow += m_nTexMapX;
		}
	}

	float factor[3] = {1, 1, 1};
	// check if we need to draw depth frame to texture
	if (depthFrame.isValid() && g_drawDepth)
	{
		const nite::UserId* pLabels = userLabels.getPixels();
		const openni::DepthPixel* pDepthRow = (const openni::DepthPixel*)depthFrame.getData();
		openni::RGB888Pixel* pTexRow = m_pTexMap + depthFrame.getCropOriginY() * m_nTexMapX;
		int rowSize = depthFrame.getStrideInBytes() / sizeof(openni::DepthPixel);

		for (int y = 0; y < depthFrame.getHeight(); ++y)
		{
			const openni::DepthPixel* pDepth = pDepthRow;
			openni::RGB888Pixel* pTex = pTexRow + depthFrame.getCropOriginX();

			for (int x = 0; x < depthFrame.getWidth(); ++x, ++pDepth, ++pTex, ++pLabels)
			{
				if (*pDepth != 0)
				{
					if (*pLabels == 0)
					{
						// Background pixel: black out or shared background color.
						if (!g_drawBackground)
						{
							factor[0] = factor[1] = factor[2] = 0;
						}
						else
						{
							factor[0] = Colors[colorCount][0];
							factor[1] = Colors[colorCount][1];
							factor[2] = Colors[colorCount][2];
						}
					}
					else
					{
						// User pixel: stable per-user tint.
						factor[0] = Colors[*pLabels % colorCount][0];
						factor[1] = Colors[*pLabels % colorCount][1];
						factor[2] = Colors[*pLabels % colorCount][2];
					}
//					// Add debug lines - every 10cm
//					else if ((*pDepth / 10) % 10 == 0)
//					{
//						factor[0] = factor[2] = 0;
//					}

					// Histogram-equalized brightness for depth contrast.
					int nHistValue = m_pDepthHist[*pDepth];
					pTex->r = nHistValue*factor[0];
					pTex->g = nHistValue*factor[1];
					pTex->b = nHistValue*factor[2];

					factor[0] = factor[1] = factor[2] = 1;
				}
			}

			pDepthRow += rowSize;
			pTexRow += m_nTexMapX;
		}
	}

	// Upload the texture and draw it as one window-filling quad.
	glTexParameteri(GL_TEXTURE_2D, GL_GENERATE_MIPMAP_SGIS, GL_TRUE);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
	glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, m_nTexMapX, m_nTexMapY, 0, GL_RGB, GL_UNSIGNED_BYTE, m_pTexMap);

	// Display the OpenGL texture map
	glColor4f(1,1,1,1);
	glEnable(GL_TEXTURE_2D);
	glBegin(GL_QUADS);

	g_nXRes = depthFrame.getVideoMode().getResolutionX();
	g_nYRes = depthFrame.getVideoMode().getResolutionY();

	// upper left
	glTexCoord2f(0, 0);
	glVertex2f(0, 0);
	// upper right
	glTexCoord2f((float)g_nXRes/(float)m_nTexMapX, 0);
	glVertex2f(GL_WIN_SIZE_X, 0);
	// bottom right
	glTexCoord2f((float)g_nXRes/(float)m_nTexMapX, (float)g_nYRes/(float)m_nTexMapY);
	glVertex2f(GL_WIN_SIZE_X, GL_WIN_SIZE_Y);
	// bottom left
	glTexCoord2f(0, (float)g_nYRes/(float)m_nTexMapY);
	glVertex2f(0, GL_WIN_SIZE_Y);

	glEnd();
	glDisable(GL_TEXTURE_2D);

	const nite::Array<nite::UserData>& users = userTrackerFrame.getUsers();
	for (int i = 0; i < users.getSize(); ++i)
	{
		const nite::UserData& user = users[i];

		updateUserState(user, userTrackerFrame.getTimestamp());
		if (user.isNew())
		{
			// New user: begin tracking and arm the exit-pose detector.
			m_pUserTracker->startSkeletonTracking(user.getId());
			m_pUserTracker->startPoseDetection(user.getId(), nite::POSE_CROSSED_HANDS);
		}
		else if (!user.isLost())
		{
			// Per-user overlays, each behind its own global toggle.
			if (g_drawStatusLabel)
			{
				DrawStatusLabel(m_pUserTracker, user);
			}
			if (g_drawCenterOfMass)
			{
				DrawCenterOfMass(m_pUserTracker, user);
			}
			if (g_drawBoundingBox)
			{
				DrawBoundingBox(user);
			}
			if (users[i].getSkeleton().getState() == nite::SKELETON_TRACKED && g_drawSkeleton)
			{
				DrawSkeleton(m_pUserTracker, user);
			}
			if (users[i].getSkeleton().getState() == nite::SKELETON_TRACKED && g_drawHat)
			{
				DrawHat(m_pUserTracker, user);
			}
			if (users[i].getSkeleton().getState() == nite::SKELETON_TRACKED && g_drawCube)
			{
				DrawCube(m_pUserTracker, user);
			}
			if (users[i].getSkeleton().getState() == nite::SKELETON_TRACKED && g_drawCubeFront)
			{
				DrawCubeFront(m_pUserTracker, user);
			}
		}

		// Crossed-hands pose held for g_poseTimeoutToExit ms exits the app.
		if (m_poseUser == 0 || m_poseUser == user.getId())
		{
			const nite::PoseData& pose = user.getPose(nite::POSE_CROSSED_HANDS);

			if (pose.isEntered())
			{
				// Start timer
				sprintf(g_generalMessage, "In exit pose. Keep it for %d second%s to exit\n", g_poseTimeoutToExit/1000, g_poseTimeoutToExit/1000 == 1 ? "" : "s");
				printf("Counting down %d second to exit\n", g_poseTimeoutToExit/1000);
				m_poseUser = user.getId();
				m_poseTime = userTrackerFrame.getTimestamp();
			}
			else if (pose.isExited())
			{
				memset(g_generalMessage, 0, sizeof(g_generalMessage));
				printf("Count-down interrupted\n");
				m_poseTime = 0;
				m_poseUser = 0;
			}
			else if (pose.isHeld())
			{
				// tick
				// Timestamps are compared in microseconds (timeout is in ms).
				if (userTrackerFrame.getTimestamp() - m_poseTime > g_poseTimeoutToExit * 1000)
				{
					printf("Count down complete. Exit...\n");
					Finalize();
					exit(2);
				}
			}
		}
	}

	if (g_drawFrameId)
	{
		DrawFrameId(userTrackerFrame.getFrameIndex());
	}

	if (g_generalMessage[0] != '\0')
	{
		char *msg = g_generalMessage;
		glColor3f(1.0f, 0.0f, 0.0f);
		glRasterPos2i(100, 20);
		glPrintString(GLUT_BITMAP_HELVETICA_18, msg);
	}

	// Swap the OpenGL display buffers
	glutSwapBuffers();
}
// NiTE new-frame callback: reads the latest tracker frame under the tracker
// lock, then for each tracked skeleton runs the hand/transport/expression/
// tempo gesture detectors and forwards the results to the sequencer and the
// game (hand spheres).
void PlayerTracker::onNewFrame(nite::UserTracker& userTracker)
{
	{
		const juce::ScopedLock sL(trackerAccess);
		// BUG FIX: the read status was discarded here, so the check below
		// tested a stale niteRc left over from some earlier call. Capture the
		// status of THIS read.
		niteRc = userTracker.readFrame(&userTrackerFrame);
	}
	if (niteRc != nite::STATUS_OK)
	{
		return;
	}

	const nite::Array<nite::UserData>& users = userTrackerFrame.getUsers();
	for (int i = 0; i < users.getSize(); ++i)
	{
		const nite::UserData& user = users[i];
		updateUserState(user,userTrackerFrame.getTimestamp());
		if (user.isNew())
		{
			// New user: start tracking with heavy smoothing for stable gestures.
			userTracker.startSkeletonTracking(user.getId());
			userTracker.setSkeletonSmoothingFactor(.85f);
		}
		else if (user.getSkeleton().getState() == nite::SKELETON_TRACKED)
		{
			//If hands positions have not enough confidence, this frame is skipped
			if (! handsTracker.update(user.getSkeleton()))
				return;

			// Transport gestures drive play/pause/stop; musical gesture
			// detection is only active while playing.
			if (transportGesture.checkTransportGesture(handsTracker, user.getSkeleton()))
			{
				if (transportGesture.getTransportStatus() == TransportGesture::PLAY)
				{
					activateMusicalGestureDetection();
					sequencer->play();
				}
				else if (transportGesture.getTransportStatus() == TransportGesture::PAUSE)
				{
					deactivateMusicalGestureDetection();
					sequencer->pause();
				}
				else if (transportGesture.getTransportStatus() == TransportGesture::STOP)
				{
					deactivateMusicalGestureDetection();
					sequencer->stop();
				}
			}

			//Detect expression changes
			if (musicalGestureDetectionActivated && expressionGesture.checkExpressionGesture(handsTracker, user.getSkeleton()))
			{
				sequencer->setExpression(expressionGesture.getExpressionDetected());
			}

			//Detect tempo changes
			if (musicalGestureDetectionActivated && tempoGesture.checkTempoGesture(handsTracker, user.getSkeleton()))
			{
				sequencer->setTempo(tempoGesture.getTempo());
			}

			//Send hands position to game (display hands as spheres)
			Sim::postEvent(Sim::getRootGroup(), new HandsMove(handsTracker.torqueCoordinatesLeftHand, handsTracker.torqueCoordinatesRightHand), -1);
		}
	}
}