// Initialize the Kinect: color, depth+player-index and skeleton streams.
// Uses the file-scope `kinect` instance created by createInstance(); stores
// stream handles, the frame-end event and the image size in file-scope state.
void initialize()
{
    createInstance();

    // Initialize the Kinect runtime for every stream this app consumes.
    ERROR_CHECK( kinect->NuiInitialize(
        NUI_INITIALIZE_FLAG_USES_COLOR |
        NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX |
        NUI_INITIALIZE_FLAG_USES_SKELETON ) );

    // Open the RGB camera stream (2 buffered frames).
    ERROR_CHECK( kinect->NuiImageStreamOpen( NUI_IMAGE_TYPE_COLOR,
        CAMERA_RESOLUTION, 0, 2, 0, &imageStreamHandle ) );

    // Open the depth camera stream (depth + player index).
    ERROR_CHECK( kinect->NuiImageStreamOpen( NUI_IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX,
        CAMERA_RESOLUTION, 0, 2, 0, &depthStreamHandle ) );

    // Near mode (left disabled)
    //ERROR_CHECK( kinect->NuiImageStreamSetImageFrameFlags(
    //    depthStreamHandle, NUI_IMAGE_STREAM_FLAG_ENABLE_NEAR_MODE ) );

    // Enable skeleton tracking with seated-mode support.
    ERROR_CHECK( kinect->NuiSkeletonTrackingEnable( 0,
        NUI_SKELETON_TRACKING_FLAG_ENABLE_SEATED_SUPPORT ) );

    // Create the frame-update event (manual reset, initially unsignaled)
    // and ask the runtime to signal it at the end of each frame.
    streamEvent = ::CreateEvent( 0, TRUE, FALSE, 0 );
    ERROR_CHECK( kinect->NuiSetFrameEndEvent( streamEvent, 0 ) );

    // Get the pixel width/height for the configured resolution.
    ::NuiImageResolutionToSize(CAMERA_RESOLUTION, width, height );
}
// Initialize the Kinect: color + skeleton streams only (no depth stream).
// Uses the file-scope `kinect` instance created by createInstance(); stores
// the color stream handle, the frame-end event and the image size.
void initialize()
{
    createInstance();

    // Initialize the Kinect runtime for color and skeleton use.
    ERROR_CHECK( kinect->NuiInitialize(
        NUI_INITIALIZE_FLAG_USES_COLOR |
        NUI_INITIALIZE_FLAG_USES_SKELETON ) );

    // Open the RGB camera stream (2 buffered frames).
    ERROR_CHECK( kinect->NuiImageStreamOpen( NUI_IMAGE_TYPE_COLOR,
        CAMERA_RESOLUTION, 0, 2, 0, &imageStreamHandle ) );

    // Enable skeleton tracking; suppress frames that carry no skeleton data.
    ERROR_CHECK( kinect->NuiSkeletonTrackingEnable( 0,
        NUI_SKELETON_TRACKING_FLAG_SUPPRESS_NO_FRAME_DATA ) );

    // Create the frame-update event (manual reset, initially unsignaled)
    // and ask the runtime to signal it at the end of each frame.
    streamEvent = ::CreateEvent( 0, TRUE, FALSE, 0 );
    ERROR_CHECK( kinect->NuiSetFrameEndEvent( streamEvent, 0 ) );

    // Get the pixel width/height for the configured resolution.
    ::NuiImageResolutionToSize(CAMERA_RESOLUTION, width, height );
}
int main(void) { //Set the error callback glfwSetErrorCallback(error_callback); //Initialize GLFW if (!glfwInit()) { exit(EXIT_FAILURE); } //Set the GLFW window creation hints - these are optional //glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3); //Request a specific OpenGL version //glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3); //Request a specific OpenGL version //glfwWindowHint(GLFW_SAMPLES, 4); //Request 4x antialiasing //glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE); //Create a window and create its OpenGL context window = glfwCreateWindow(960, 720, "Test Window", NULL, NULL); //If the window couldn't be created if (!window) { fprintf(stderr, "Failed to open GLFW window.\n"); glfwTerminate(); //exit(EXIT_FAILURE); } //This function makes the context of the specified window current on the calling thread. glfwMakeContextCurrent(window); //Sets the key callback glfwSetKeyCallback(window, key_callback); //Initialize GLEW GLenum err = glewInit(); //If GLEW hasn't initialized if (err != GLEW_OK) { fprintf(stderr, "Error: %s\n", glewGetErrorString(err)); return -1; } //Set a background color glClearColor(0.0f, 0.0f, 0.0f, 0.0f); glfwSetCursorPos(window, 1024 / 2, 768 / 2); GLuint VertexArrayID; glGenVertexArrays(1, &VertexArrayID); glBindVertexArray(VertexArrayID); // Create and compile our GLSL program from the shaders GLuint red = LoadShaders("SimpleTransform.vertexshader", "SingleColorRed.fragmentshader"); GLuint grid = LoadShaders("SimpleTransform.vertexshader", "SingleColorGrid.fragmentshader"); glBindFragDataLocation(red, 0, "red"); glBindFragDataLocation(grid, 1, "grid"); // Get a handle for our "MVP" uniform GLuint MatrixID = glGetUniformLocation(red, "MVP"); // Projection matrix : 45° Field of View, 4:3 ratio, display range : 0.1 unit <-> 100 units glm::mat4 Projection = glm::perspective(45.0f, 4.0f / 3.0f, 0.1f, 1000.0f); // Or, for an ortho camera : //glm::mat4 Projection = glm::ortho(-10.0f,10.0f,-10.0f,10.0f,0.0f,100.0f); // In world 
coordinates // Camera matrix glm::mat4 View = glm::lookAt( glm::vec3(4, 3, 3), // Camera is at (4,3,3), in World Space glm::vec3(0, 0, 0), // and looks at the origin glm::vec3(0, 1, 0) // Head is up (set to 0,-1,0 to look upside-down) ); static const GLfloat g_vertex_buffer_data[] = { -1.0f, -1.0f, 0.0f, 1.0f, -1.0f, 0.0f, 0.0f, 1.0f, 0.0f, }; static const GLushort g_element_buffer_data[] = { 0, 1, 2 }; GLuint vertexbuffer; glGenBuffers(1, &vertexbuffer); glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer); glBufferData(GL_ARRAY_BUFFER, sizeof(g_vertex_buffer_data), g_vertex_buffer_data, GL_STATIC_DRAW); static const GLfloat g_triangle_buffer_data[] = { -1.0f, -1.0f, -1.0f, 1.0f, -1.0f, -1.0f, 0.0f, 1.0f, -1.0f, }; GLuint triangle; glGenBuffers(1, &triangle); glBindBuffer(GL_ARRAY_BUFFER, triangle); glBufferData(GL_ARRAY_BUFFER, sizeof(g_triangle_buffer_data), g_triangle_buffer_data, GL_STATIC_DRAW); // Enable depth test glEnable(GL_DEPTH_TEST); // Accept fragment if it closer to the camera than the former one glDepthFunc(GL_LESS); glEnable(GL_CULL_FACE); glEnable(GL_LIGHTING); glEnable(GL_SMOOTH);//OPENGL INSTANTIATION HRESULT hr; NUI_IMAGE_FRAME depthFrame; HANDLE hDepth; INuiSensor* pNuiSensor = NULL; int iSensorCount = 0; hr = NuiGetSensorCount(&iSensorCount); if (FAILED(hr)) return hr; for (int i = 0; i < iSensorCount; i++) { INuiSensor* tempSensor; hr = NuiCreateSensorByIndex(i, &tempSensor); if (FAILED(hr)) continue; hr = tempSensor->NuiStatus(); if (S_OK == hr) { pNuiSensor = tempSensor; break; } tempSensor->Release(); } for (int i = 0; i < 2048; i++) { depthLookUp[i] = rawDepthToMeters(i); } rotation = getRotationMatrix(theta, psi, fi); pNuiSensor->NuiInitialize(NUI_INITIALIZE_FLAG_USES_DEPTH); pNuiSensor->NuiImageStreamOpen( NUI_IMAGE_TYPE_DEPTH, NUI_IMAGE_RESOLUTION_320x240, 0, 2, NULL, &hDepth);//KINECT INSTANTIATION cout << "Starting Main Loop"; static double lastTime = glfwGetTime(); //Main Loop do { double currentTime = glfwGetTime(); float deltaTime = 
float(currentTime - lastTime); //Clear color buffer glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); glUseProgram(grid); modelMatrix(MatrixID); hr = pNuiSensor->NuiImageStreamGetNextFrame(hDepth, 0, &depthFrame); if (!FAILED(hr)) { INuiFrameTexture* pTexture; NUI_LOCKED_RECT LockedRect; hr = pNuiSensor->NuiImageFrameGetDepthImagePixelFrameTexture( hDepth, &depthFrame, false, &pTexture); if (FAILED(hr)) { pNuiSensor->NuiImageStreamReleaseFrame(hDepth, &depthFrame); continue; } pTexture->LockRect(0, &LockedRect, NULL, 0);//Kinect Image Grab int skipX = 1; int skipY = 1; float scalar = 4.0f; if (LockedRect.Pitch != 0) { for (int x = 0; x < width; x += skipX) { for (int y = 0; y < height; y += skipY) { const NUI_DEPTH_IMAGE_PIXEL * pBufferRun = reinterpret_cast<const NUI_DEPTH_IMAGE_PIXEL *>(LockedRect.pBits) + x + y * width; //float depth = (float)(pBufferRun->depth); //glm::vec3 location = realWorld(depth, height - y, x, 500.0f, 1000.0f); //createCube(0.006f, location); Vector4 locationDepth = NuiTransformDepthImageToSkeleton(x, y, (short)(pBufferRun->depth << 3)); glm::vec3 locationDepthxyz = glm::vec3(locationDepth.x * scalar, locationDepth.y * scalar, locationDepth.z * scalar); createCube(0.009f, locationDepthxyz); } } } pTexture->UnlockRect(0); pTexture->Release(); pNuiSensor->NuiImageStreamReleaseFrame(hDepth, &depthFrame); } createGrid(); //Test drawings /* glUseProgram(red); modelMatrix(MatrixID); //createCube(0.05f, glm::vec3(1.0f,1.0f,1.0f)); // 1rst attribute buffer : vertices glEnableVertexAttribArray(0); //createObject(vertexbuffer, GL_TRIANGLES, 3); //createObject(triangle, GL_TRIANGLES, 3); glDisableVertexAttribArray(0); */ //Swap buffers glfwSwapBuffers(window); //Get and organize events, like keyboard and mouse input, window resizing, etc... 
glfwPollEvents(); std::string title = "Title | DELTA TIME " + std::to_string(1.0f/deltaTime); const char* pszConstString = title.c_str(); glfwSetWindowTitle(window, pszConstString); lastTime = currentTime; } //Check if the ESC key had been pressed or if the window had been closed while (!glfwWindowShouldClose(window)); //Close OpenGL window and terminate GLFW glfwDestroyWindow(window); //Finalize and clean up GLFW glfwTerminate(); exit(EXIT_SUCCESS); }
int _tmain(int argc, _TCHAR* argv[]) { cv::setUseOptimized( true ); // Kinectのインスタンス生成、初期化 INuiSensor* pSensor; HRESULT hResult = S_OK; hResult = NuiCreateSensorByIndex( 0, &pSensor ); if( FAILED( hResult ) ){ std::cerr << "Error : NuiCreateSensorByIndex" << std::endl; return -1; } hResult = pSensor->NuiInitialize( NUI_INITIALIZE_FLAG_USES_COLOR ); if( FAILED( hResult ) ){ std::cerr << "Error : NuiInitialize" << std::endl; return -1; } // Colorストリームの開始 HANDLE hColorEvent = INVALID_HANDLE_VALUE; HANDLE hColorHandle = INVALID_HANDLE_VALUE; hColorEvent = CreateEvent( nullptr, true, false, nullptr ); hResult = pSensor->NuiImageStreamOpen( NUI_IMAGE_TYPE_COLOR, NUI_IMAGE_RESOLUTION_640x480, 0, 2, hColorEvent, &hColorHandle ); if( FAILED( hResult ) ){ std::cerr << "Error : NuiImageStreamOpen( COLOR )" << std::endl; return -1; } HANDLE hEvents[1] = { hColorEvent }; cv::namedWindow( "Color" ); while( 1 ){ // フレームの更新待ち ResetEvent( hColorEvent ); WaitForMultipleObjects( ARRAYSIZE( hEvents ), hEvents, true, INFINITE ); // Colorカメラからフレームを取得 NUI_IMAGE_FRAME pColorImageFrame = { 0 }; hResult = pSensor->NuiImageStreamGetNextFrame( hColorHandle, 0, &pColorImageFrame ); if( FAILED( hResult ) ){ std::cerr << "Error : NuiImageStreamGetNextFrame( COLOR )" << std::endl; return -1; } // Color画像データの取得 INuiFrameTexture* pColorFrameTexture = pColorImageFrame.pFrameTexture; NUI_LOCKED_RECT sColorLockedRect; pColorFrameTexture->LockRect( 0, &sColorLockedRect, nullptr, 0 ); // 表示 cv::Mat colorMat( 480, 640, CV_8UC4, reinterpret_cast<uchar*>( sColorLockedRect.pBits ) ); cv::imshow( "Color", colorMat ); // フレームの解放 pColorFrameTexture->UnlockRect( 0 ); pSensor->NuiImageStreamReleaseFrame( hColorHandle, &pColorImageFrame ); // ループの終了判定(Escキー) if( cv::waitKey( 30 ) == VK_ESCAPE ){ break; } } // Kinectの終了処理 pSensor->NuiShutdown(); CloseHandle( hColorEvent ); CloseHandle( hColorHandle ); cv::destroyAllWindows(); return 0; }
int _tmain(int argc, _TCHAR* argv[]) { cv::setUseOptimized( true ); INuiSensor* pSensor; HRESULT hResult = S_OK; hResult = NuiCreateSensorByIndex( 0, &pSensor ); if( FAILED( hResult ) ){ std::cerr << "Error : NuiCreateSensorByIndex" << std::endl; return -1; } hResult = pSensor->NuiInitialize( NUI_INITIALIZE_FLAG_USES_COLOR | NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX | NUI_INITIALIZE_FLAG_USES_SKELETON ); if( FAILED( hResult ) ){ std::cerr << "Error : NuiInitialize" << std::endl; return -1; } HANDLE hColorEvent = INVALID_HANDLE_VALUE; HANDLE hColorHandle = INVALID_HANDLE_VALUE; hColorEvent = CreateEvent( nullptr, true, false, nullptr ); hResult = pSensor->NuiImageStreamOpen( NUI_IMAGE_TYPE_COLOR, NUI_IMAGE_RESOLUTION_640x480, 0, 2, hColorEvent, &hColorHandle ); if( FAILED( hResult ) ){ std::cerr << "Error : NuiImageStreamOpen( COLOR )" << std::endl; return -1; } HANDLE hDepthPlayerEvent = INVALID_HANDLE_VALUE; HANDLE hDepthPlayerHandle = INVALID_HANDLE_VALUE; hDepthPlayerEvent = CreateEvent( nullptr, true, false, nullptr ); hResult = pSensor->NuiImageStreamOpen( NUI_IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX, NUI_IMAGE_RESOLUTION_640x480, 0, 2, hDepthPlayerEvent, &hDepthPlayerHandle ); if( FAILED( hResult ) ){ std::cerr << "Error : NuiImageStreamOpen( DEPTH&PLAYER )" << std::endl; return -1; } HANDLE hSkeletonEvent = INVALID_HANDLE_VALUE; hSkeletonEvent = CreateEvent( nullptr, true, false, nullptr ); hResult = pSensor->NuiSkeletonTrackingEnable( hSkeletonEvent, 0 ); if( FAILED( hResult ) ){ std::cerr << "Error : NuiSkeletonTrackingEnable" << std::endl; return -1; } unsigned long refWidth = 0; unsigned long refHeight = 0; NuiImageResolutionToSize( NUI_IMAGE_RESOLUTION_640x480, refWidth, refHeight ); int width = static_cast<int>( refWidth ); int height = static_cast<int>( refHeight ); INuiCoordinateMapper* pCordinateMapper; hResult = pSensor->NuiGetCoordinateMapper( &pCordinateMapper ); if( FAILED( hResult ) ){ std::cerr << "Error : NuiGetCoordinateMapper" << std::endl; 
return -1; } std::vector<NUI_COLOR_IMAGE_POINT> pColorPoint( width * height ); HANDLE hEvents[3] = { hColorEvent, hDepthPlayerEvent, hSkeletonEvent }; cv::Vec3b color[7]; color[0] = cv::Vec3b( 0, 0, 0 ); color[1] = cv::Vec3b( 255, 0, 0 ); color[2] = cv::Vec3b( 0, 255, 0 ); color[3] = cv::Vec3b( 0, 0, 255 ); color[4] = cv::Vec3b( 255, 255, 0 ); color[5] = cv::Vec3b( 255, 0, 255 ); color[6] = cv::Vec3b( 0, 255, 255 ); cv::namedWindow( "Color" ); cv::namedWindow( "Depth" ); cv::namedWindow( "Player" ); cv::namedWindow( "Skeleton" ); while( 1 ){ ResetEvent( hColorEvent ); ResetEvent( hDepthPlayerEvent ); ResetEvent( hSkeletonEvent ); WaitForMultipleObjects( ARRAYSIZE( hEvents ), hEvents, true, INFINITE ); NUI_IMAGE_FRAME colorImageFrame = { 0 }; hResult = pSensor->NuiImageStreamGetNextFrame( hColorHandle, 0, &colorImageFrame ); if( FAILED( hResult ) ){ std::cerr << "Error : NuiImageStreamGetNextFrame( COLOR )" << std::endl; return -1; } INuiFrameTexture* pColorFrameTexture = colorImageFrame.pFrameTexture; NUI_LOCKED_RECT colorLockedRect; pColorFrameTexture->LockRect( 0, &colorLockedRect, nullptr, 0 ); NUI_IMAGE_FRAME depthPlayerImageFrame = { 0 }; hResult = pSensor->NuiImageStreamGetNextFrame( hDepthPlayerHandle, 0, &depthPlayerImageFrame ); if( FAILED( hResult ) ){ std::cerr << "Error : NuiImageStreamGetNextFrame( DEPTH&PLAYER )" << std::endl; return -1; } BOOL nearMode = false; INuiFrameTexture* pDepthPlayerFrameTexture = nullptr; pSensor->NuiImageFrameGetDepthImagePixelFrameTexture( hDepthPlayerHandle, &depthPlayerImageFrame, &nearMode, &pDepthPlayerFrameTexture ); NUI_LOCKED_RECT depthPlayerLockedRect; pDepthPlayerFrameTexture->LockRect( 0, &depthPlayerLockedRect, nullptr, 0 ); NUI_SKELETON_FRAME skeletonFrame = { 0 }; hResult = pSensor->NuiSkeletonGetNextFrame( 0, &skeletonFrame ); if( FAILED( hResult ) ){ std::cout << "Error : NuiSkeletonGetNextFrame" << std::endl; return -1; } /* NUI_TRANSFORM_SMOOTH_PARAMETERS smoothParameter; smoothParameter.fSmoothing = 0.5; 
smoothParameter.fCorrection = 0.5; smoothParameter.fPrediction = 0.0f; smoothParameter.fJitterRadius = 0.05f; smoothParameter.fMaxDeviationRadius = 0.04f; hResult = NuiTransformSmooth( &skeletonFrame, &smoothParameter ); */ cv::Mat colorMat( height, width, CV_8UC4, reinterpret_cast<unsigned char*>( colorLockedRect.pBits ) ); cv::Mat bufferMat = cv::Mat::zeros( height, width, CV_16UC1 ); cv::Mat playerMat = cv::Mat::zeros( height, width, CV_8UC3 ); NUI_DEPTH_IMAGE_PIXEL* pDepthPlayerPixel = reinterpret_cast<NUI_DEPTH_IMAGE_PIXEL*>( depthPlayerLockedRect.pBits ); pCordinateMapper->MapDepthFrameToColorFrame( NUI_IMAGE_RESOLUTION_640x480, width * height, pDepthPlayerPixel, NUI_IMAGE_TYPE_COLOR, NUI_IMAGE_RESOLUTION_640x480, width * height, &pColorPoint[0] ); for( int y = 0; y < height; y++ ){ for( int x = 0; x < width; x++ ){ unsigned int index = y * width + x; bufferMat.at<unsigned short>( pColorPoint[index].y, pColorPoint[index].x ) = pDepthPlayerPixel[index].depth; playerMat.at<cv::Vec3b>( pColorPoint[index].y, pColorPoint[index].x ) = color[pDepthPlayerPixel[index].playerIndex]; } } cv::Mat depthMat( height, width, CV_8UC1 ); bufferMat.convertTo( depthMat, CV_8U, -255.0f / 10000.0f, 255.0f ); cv::Mat skeletonMat = cv::Mat::zeros( height, width, CV_8UC3 ); NUI_COLOR_IMAGE_POINT colorPoint; for( int count = 0; count < NUI_SKELETON_COUNT; count++ ){ NUI_SKELETON_DATA skeletonData = skeletonFrame.SkeletonData[count]; if( skeletonData.eTrackingState == NUI_SKELETON_TRACKED ){ for( int position = 0; position < NUI_SKELETON_POSITION_COUNT; position++ ){ pCordinateMapper->MapSkeletonPointToColorPoint( &skeletonData.SkeletonPositions[position], NUI_IMAGE_TYPE_COLOR, NUI_IMAGE_RESOLUTION_640x480, &colorPoint ); if( ( colorPoint.x >= 0 ) && ( colorPoint.x < width ) && ( colorPoint.y >= 0 ) && ( colorPoint.y < height ) ){ cv::circle( skeletonMat, cv::Point( colorPoint.x, colorPoint.y ), 10, static_cast<cv::Scalar>( color[count + 1] ), -1, CV_AA ); } } std::stringstream ss; ss 
<< skeletonData.SkeletonPositions[NUI_SKELETON_POSITION_HIP_CENTER].z; pCordinateMapper->MapSkeletonPointToColorPoint( &skeletonData.SkeletonPositions[NUI_SKELETON_POSITION_HEAD], NUI_IMAGE_TYPE_COLOR, NUI_IMAGE_RESOLUTION_640x480, &colorPoint ); if( ( colorPoint.x >= 0 ) && ( colorPoint.x < width ) && ( colorPoint.y >= 0 ) && ( colorPoint.y < height ) ){ cv::putText( skeletonMat, ss.str(), cv::Point( colorPoint.x - 50, colorPoint.y - 20 ), cv::FONT_HERSHEY_SIMPLEX, 1.5f, static_cast<cv::Scalar>( color[count + 1] ) ); } } else if( skeletonData.eTrackingState == NUI_SKELETON_POSITION_ONLY ){ pCordinateMapper->MapSkeletonPointToColorPoint( &skeletonData.SkeletonPositions[NUI_SKELETON_POSITION_HIP_CENTER], NUI_IMAGE_TYPE_COLOR, NUI_IMAGE_RESOLUTION_640x480, &colorPoint ); if( ( colorPoint.x >= 0 ) && ( colorPoint.x < width ) && ( colorPoint.y >= 0 ) && ( colorPoint.y < height ) ){ cv::circle( skeletonMat, cv::Point( colorPoint.x, colorPoint.y ), 10, static_cast<cv::Scalar>( color[count + 1] ), -1, CV_AA ); } } } cv::imshow( "Color", colorMat ); cv::imshow( "Depth", depthMat ); cv::imshow( "Player", playerMat ); cv::imshow( "Skeleton", skeletonMat ); pColorFrameTexture->UnlockRect( 0 ); pDepthPlayerFrameTexture->UnlockRect( 0 ); pSensor->NuiImageStreamReleaseFrame( hColorHandle, &colorImageFrame ); pSensor->NuiImageStreamReleaseFrame( hDepthPlayerHandle, &depthPlayerImageFrame ); if( cv::waitKey( 30 ) == VK_ESCAPE ){ break; } } pSensor->NuiShutdown(); pSensor->NuiSkeletonTrackingDisable(); pCordinateMapper->Release(); CloseHandle( hColorEvent ); CloseHandle( hDepthPlayerEvent ); CloseHandle( hSkeletonEvent ); cv::destroyAllWindows(); return 0; }
int _tmain(int argc, _TCHAR* argv[]) { cv::setUseOptimized( true ); // Kinectのインスタンス生成、初期化 INuiSensor* pSensor; HRESULT hResult = S_OK; hResult = NuiCreateSensorByIndex( 0, &pSensor ); if( FAILED( hResult ) ){ std::cerr << "Error : NuiCreateSensorByIndex" << std::endl; return -1; } hResult = pSensor->NuiInitialize( NUI_INITIALIZE_FLAG_USES_COLOR | NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX | NUI_INITIALIZE_FLAG_USES_SKELETON ); if( FAILED( hResult ) ){ std::cerr << "Error : NuiInitialize" << std::endl; return -1; } // Colorストリーム HANDLE hColorEvent = INVALID_HANDLE_VALUE; HANDLE hColorHandle = INVALID_HANDLE_VALUE; hColorEvent = CreateEvent( nullptr, true, false, nullptr ); hResult = pSensor->NuiImageStreamOpen( NUI_IMAGE_TYPE_COLOR, NUI_IMAGE_RESOLUTION_640x480, 0, 2, hColorEvent, &hColorHandle ); if( FAILED( hResult ) ){ std::cerr << "Error : NuiImageStreamOpen( COLOR )" << std::endl; return -1; } // Depth&Playerストリーム HANDLE hDepthPlayerEvent = INVALID_HANDLE_VALUE; HANDLE hDepthPlayerHandle = INVALID_HANDLE_VALUE; hDepthPlayerEvent = CreateEvent( nullptr, true, false, nullptr ); hResult = pSensor->NuiImageStreamOpen( NUI_IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX, NUI_IMAGE_RESOLUTION_640x480, 0, 2, hDepthPlayerEvent, &hDepthPlayerHandle ); if( FAILED( hResult ) ){ std::cerr << "Error : NuiImageStreamOpen( DEPTH&PLAYER )" << std::endl; return -1; } // Skeletonストリーム HANDLE hSkeletonEvent = INVALID_HANDLE_VALUE; hSkeletonEvent = CreateEvent( nullptr, true, false, nullptr ); hResult = pSensor->NuiSkeletonTrackingEnable( hSkeletonEvent, 0 ); if( FAILED( hResult ) ){ std::cerr << "Error : NuiSkeletonTrackingEnable" << std::endl; return -1; } HANDLE hEvents[3] = { hColorEvent, hDepthPlayerEvent, hSkeletonEvent }; // カラーテーブル cv::Vec3b color[7]; color[0] = cv::Vec3b( 0, 0, 0 ); color[1] = cv::Vec3b( 255, 0, 0 ); color[2] = cv::Vec3b( 0, 255, 0 ); color[3] = cv::Vec3b( 0, 0, 255 ); color[4] = cv::Vec3b( 255, 255, 0 ); color[5] = cv::Vec3b( 255, 0, 255 ); color[6] = cv::Vec3b( 
0, 255, 255 ); cv::namedWindow( "Color" ); cv::namedWindow( "Depth" ); cv::namedWindow( "Player" ); cv::namedWindow( "Skeleton" ); while( 1 ){ // フレームの更新待ち ResetEvent( hColorEvent ); ResetEvent( hDepthPlayerEvent ); ResetEvent( hSkeletonEvent ); WaitForMultipleObjects( ARRAYSIZE( hEvents ), hEvents, true, INFINITE ); // Colorカメラからフレームを取得 NUI_IMAGE_FRAME pColorImageFrame = { 0 }; hResult = pSensor->NuiImageStreamGetNextFrame( hColorHandle, 0, &pColorImageFrame ); if( FAILED( hResult ) ){ std::cerr << "Error : NuiImageStreamGetNextFrame( COLOR )" << std::endl; return -1; } // Depthセンサーからフレームを取得 NUI_IMAGE_FRAME pDepthPlayerImageFrame = { 0 }; hResult = pSensor->NuiImageStreamGetNextFrame( hDepthPlayerHandle, 0, &pDepthPlayerImageFrame ); if( FAILED( hResult ) ){ std::cerr << "Error : NuiImageStreamGetNextFrame( DEPTH&PLAYER )" << std::endl; return -1; } // Skeletonフレームを取得 NUI_SKELETON_FRAME pSkeletonFrame = { 0 }; hResult = pSensor->NuiSkeletonGetNextFrame( 0, &pSkeletonFrame ); if( FAILED( hResult ) ){ std::cout << "Error : NuiSkeletonGetNextFrame" << std::endl; return -1; } // Color画像データの取得 INuiFrameTexture* pColorFrameTexture = pColorImageFrame.pFrameTexture; NUI_LOCKED_RECT sColorLockedRect; pColorFrameTexture->LockRect( 0, &sColorLockedRect, nullptr, 0 ); // Depthデータの取得 INuiFrameTexture* pDepthPlayerFrameTexture = pDepthPlayerImageFrame.pFrameTexture; NUI_LOCKED_RECT sDepthPlayerLockedRect; pDepthPlayerFrameTexture->LockRect( 0, &sDepthPlayerLockedRect, nullptr, 0 ); // 表示 cv::Mat colorMat( 480, 640, CV_8UC4, reinterpret_cast<uchar*>( sColorLockedRect.pBits ) ); LONG registX = 0; LONG registY = 0; ushort* pBuffer = reinterpret_cast<ushort*>( sDepthPlayerLockedRect.pBits ); cv::Mat bufferMat = cv::Mat::zeros( 480, 640, CV_16UC1 ); cv::Mat playerMat = cv::Mat::zeros( 480, 640, CV_8UC3 ); for( int y = 0; y < 480; y++ ){ for( int x = 0; x < 640; x++ ){ pSensor->NuiImageGetColorPixelCoordinatesFromDepthPixelAtResolution( NUI_IMAGE_RESOLUTION_640x480, 
NUI_IMAGE_RESOLUTION_640x480, nullptr, x, y, *pBuffer, ®istX, ®istY ); if( ( registX >= 0 ) && ( registX < 640 ) && ( registY >= 0 ) && ( registY < 480 ) ){ bufferMat.at<ushort>( registY, registX ) = *pBuffer & 0xFFF8; playerMat.at<cv::Vec3b>( registY, registX ) = color[*pBuffer & 0x7]; } pBuffer++; } } cv::Mat depthMat( 480, 640, CV_8UC1 ); bufferMat.convertTo( depthMat, CV_8UC3, -255.0f / NUI_IMAGE_DEPTH_MAXIMUM, 255.0f ); cv::Mat skeletonMat = cv::Mat::zeros( 480, 640, CV_8UC3 ); cv::Point2f point; for( int count = 0; count < NUI_SKELETON_COUNT; count++ ){ NUI_SKELETON_DATA skeleton = pSkeletonFrame.SkeletonData[count]; if( skeleton.eTrackingState == NUI_SKELETON_TRACKED ){ for( int position = 0; position < NUI_SKELETON_POSITION_COUNT; position++ ){ NuiTransformSkeletonToDepthImage( skeleton.SkeletonPositions[position], &point.x, &point.y, NUI_IMAGE_RESOLUTION_640x480 ); cv::circle( skeletonMat, point, 10, static_cast<cv::Scalar>( color[count + 1] ), -1, CV_AA ); } } } cv::imshow( "Color", colorMat ); cv::imshow( "Depth", depthMat ); cv::imshow( "Player", playerMat ); cv::imshow( "Skeleton", skeletonMat ); // フレームの解放 pColorFrameTexture->UnlockRect( 0 ); pDepthPlayerFrameTexture->UnlockRect( 0 ); pSensor->NuiImageStreamReleaseFrame( hColorHandle, &pColorImageFrame ); pSensor->NuiImageStreamReleaseFrame( hDepthPlayerHandle, &pDepthPlayerImageFrame ); // ループの終了判定(Escキー) if( cv::waitKey( 30 ) == VK_ESCAPE ){ break; } } // Kinectの終了処理 pSensor->NuiShutdown(); pSensor->NuiSkeletonTrackingDisable(); CloseHandle( hColorEvent ); CloseHandle( hDepthPlayerEvent ); CloseHandle( hSkeletonEvent ); CloseHandle( hColorHandle ); CloseHandle( hDepthPlayerHandle ); cv::destroyAllWindows(); return 0; }
int _tmain(int argc, _TCHAR* argv[]) { cv::setUseOptimized( true ); INuiSensor* pSensor; HRESULT hResult = S_OK; hResult = NuiCreateSensorByIndex( 0, &pSensor ); if( FAILED( hResult ) ){ std::cerr << "Error : NuiCreateSensorByIndex" << std::endl; return -1; } hResult = pSensor->NuiInitialize( NUI_INITIALIZE_FLAG_USES_COLOR ); if( FAILED( hResult ) ){ std::cerr << "Error : NuiInitialize" << std::endl; return -1; } HANDLE hColorEvent = INVALID_HANDLE_VALUE; HANDLE hColorHandle = INVALID_HANDLE_VALUE; hColorEvent = CreateEvent( nullptr, true, false, nullptr ); hResult = pSensor->NuiImageStreamOpen( NUI_IMAGE_TYPE_COLOR, NUI_IMAGE_RESOLUTION_640x480, 0, 2, hColorEvent, &hColorHandle ); if( FAILED( hResult ) ){ std::cerr << "Error : NuiImageStreamOpen( COLOR )" << std::endl; return -1; } INuiColorCameraSettings* pCameraSettings; hResult = pSensor->NuiGetColorCameraSettings( &pCameraSettings ); if( FAILED( hResult ) ){ std::cerr << "Error : NuiGetColorCameraSettings" << std::endl; return -1; } unsigned long refWidth = 0; unsigned long refHeight = 0; NuiImageResolutionToSize( NUI_IMAGE_RESOLUTION_640x480, refWidth, refHeight ); int width = static_cast<int>( refWidth ); int height = static_cast<int>( refHeight ); HANDLE hEvents[1] = { hColorEvent }; cv::namedWindow( "Color" ); while( 1 ){ ResetEvent( hColorEvent ); WaitForMultipleObjects( ARRAYSIZE( hEvents ), hEvents, true, INFINITE ); NUI_IMAGE_FRAME colorImageFrame = { 0 }; hResult = pSensor->NuiImageStreamGetNextFrame( hColorHandle, 0, &colorImageFrame ); if( FAILED( hResult ) ){ std::cerr << "Error : NuiImageStreamGetNextFrame( COLOR )" << std::endl; return -1; } INuiFrameTexture* pColorFrameTexture = colorImageFrame.pFrameTexture; NUI_LOCKED_RECT colorLockedRect; pColorFrameTexture->LockRect( 0, &colorLockedRect, nullptr, 0 ); cv::Mat colorMat( height, width, CV_8UC4, reinterpret_cast<unsigned char*>( colorLockedRect.pBits ) ); cv::imshow( "Color", colorMat ); pColorFrameTexture->UnlockRect( 0 ); 
pSensor->NuiImageStreamReleaseFrame( hColorHandle, &colorImageFrame ); int key = cv::waitKey( 30 ); if( key == VK_ESCAPE ){ break; } } pSensor->NuiShutdown(); pCameraSettings->Release(); CloseHandle( hColorEvent ); cv::destroyAllWindows(); return 0; }