void ColorStream::setPixels(openni::VideoFrameRef frame)
{
	Stream::setPixels(frame);

	openni::VideoMode m = frame.getVideoMode();
	int w = m.getResolutionX();
	int h = m.getResolutionY();
	int num_pixels = w * h;

	pix.allocate(w, h, 3);

	if (m.getPixelFormat() == openni::PIXEL_FORMAT_RGB888)
	{
		// Copy the RGB frame into the back buffer of the double-buffered
		// pixel store; this flat loop assumes tightly packed rows.
		const unsigned char *src = (const unsigned char*)frame.getData();
		unsigned char *dst = pix.getBackBuffer().getPixels();
		for (int i = 0; i < num_pixels; i++)
		{
			dst[0] = src[0];
			dst[1] = src[1];
			dst[2] = src[2];
			src += 3;
			dst += 3;
		}
	}

	pix.swap();
}
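// The flat copy above works only when rows are tightly packed. Cropped or
// padded frames can report a stride larger than width * 3 bytes, so a
// row-by-row copy that honors getStrideInBytes() is safer. A minimal sketch
// (copyRgbRows is a hypothetical helper, not part of the original class):
#include <cstring>

static void copyRgbRows(unsigned char* dst, const openni::VideoFrameRef& frame)
{
	const unsigned char* src = (const unsigned char*)frame.getData();
	int w = frame.getWidth();
	int h = frame.getHeight();
	int stride = frame.getStrideInBytes();

	for (int y = 0; y < h; y++)
	{
		memcpy(dst, src, w * 3); // one tightly packed RGB row
		src += stride;           // advance by the frame's real row pitch
		dst += w * 3;            // destination rows carry no padding
	}
}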
static void toCVTImage(Image& dst, const openni::VideoFrameRef& frame)
{
	dst.reallocate(frame.getWidth(), frame.getHeight(),
	               Openni2Helper::toIFormat(frame.getVideoMode().getPixelFormat()));

	switch (frame.getVideoMode().getPixelFormat()) {
		case openni::PIXEL_FORMAT_RGB888:
			copyRGB(dst, (const uint8_t*)frame.getData(), frame.getStrideInBytes());
			break;
		default:
			copyData(dst, (const uint8_t*)frame.getData(), frame.getStrideInBytes());
	}
}
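// toIFormat, copyRGB, and copyData above are helpers from the surrounding
// codebase. For orientation, a mapping from the OpenNI pixel formats seen in
// this file to bytes per pixel could look like the following sketch
// (bytesPerPixel is hypothetical, not an OpenNI or CVT API):
static int bytesPerPixel(openni::PixelFormat fmt)
{
	switch (fmt) {
		case openni::PIXEL_FORMAT_RGB888:       return 3; // 8-bit R, G, B
		case openni::PIXEL_FORMAT_GRAY8:        return 1; // 8-bit IR
		case openni::PIXEL_FORMAT_GRAY16:       return 2; // 16-bit IR
		case openni::PIXEL_FORMAT_DEPTH_1_MM:
		case openni::PIXEL_FORMAT_DEPTH_100_UM: return 2; // 16-bit depth
		default:                                return 0; // unhandled here
	}
}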
void DepthStream::setPixels(openni::VideoFrameRef frame)
{
	Stream::setPixels(frame);

	const unsigned short *pixels = (const unsigned short*)frame.getData();
	int w = frame.getVideoMode().getResolutionX();
	int h = frame.getVideoMode().getResolutionY();

	pix.allocate(w, h, 1);
	pix.getBackBuffer().setFromPixels(pixels, w, h, OF_IMAGE_GRAYSCALE);
	pix.swap();
}
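// The back buffer above receives raw 16-bit depth values (millimetres on
// most OpenNI2 devices). For an 8-bit preview a linear mapping is common; a
// minimal sketch (depthToGray8 and the 4000 mm far plane are assumptions,
// not part of the original class):
#include <cstdint>

static void depthToGray8(const uint16_t* src, uint8_t* dst,
                         int numPixels, uint16_t maxDepthMM = 4000)
{
	for (int i = 0; i < numPixels; i++)
	{
		uint16_t d = src[i];
		if (d == 0 || d > maxDepthMM)
			dst[i] = 0;                                       // no reading / too far
		else
			dst[i] = (uint8_t)(255 - (d * 255) / maxDepthMM); // nearer = brighter
	}
}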
// Returns a CV_16U depth image (a deep copy, so it outlives the frame).
cv::Mat getDepthImage(openni::VideoFrameRef& depth_frame)
{
	if (!depth_frame.isValid()) {
		return cv::Mat();
	}

	openni::VideoMode video_mode = depth_frame.getVideoMode();
	cv::Mat depth_img(video_mode.getResolutionY(), video_mode.getResolutionX(),
	                  CV_16U, (void*)depth_frame.getData());
	return depth_img.clone();
}
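// Note on the clone() above: the cv::Mat constructor that takes a data
// pointer wraps the frame's buffer without copying, so clone() is what gives
// the returned Mat its own storage and keeps it valid after OpenNI recycles
// the VideoFrameRef.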
cv::Mat getColorImage(openni::VideoFrameRef& color_frame)
{
	if (!color_frame.isValid()) {
		return cv::Mat();
	}

	openni::VideoMode video_mode = color_frame.getVideoMode();
	cv::Mat color_img(video_mode.getResolutionY(), video_mode.getResolutionX(),
	                  CV_8UC3, (void*)color_frame.getData());

	// OpenNI delivers RGB; OpenCV expects BGR
	cv::Mat ret_img;
	cv::cvtColor(color_img, ret_img, CV_RGB2BGR);
	return ret_img;
}
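// A minimal capture loop wiring these helpers to a window might look like
// this sketch (error handling and mode selection elided; depth display would
// be analogous via getDepthImage):
#include <OpenNI.h>
#include <opencv2/opencv.hpp>

int main()
{
	openni::OpenNI::initialize();

	openni::Device device;
	device.open(openni::ANY_DEVICE);

	openni::VideoStream color;
	color.create(device, openni::SENSOR_COLOR);
	color.start();

	openni::VideoFrameRef frame;
	while (cv::waitKey(10) != 27)  // run until Esc
	{
		color.readFrame(&frame);
		cv::imshow("color", getColorImage(frame));
	}

	color.stop();
	color.destroy();
	device.close();
	openni::OpenNI::shutdown();
	return 0;
}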
// Convert a color stream frame into a displayable cv::Mat
cv::Mat showColorStream(const openni::VideoFrameRef& colorFrame)
{
	cv::Mat colorImage;

	// Color stream
	if (colorFrame.getVideoMode().getPixelFormat() == openni::PIXEL_FORMAT_RGB888) {
		// Wrap the frame data as an OpenCV image
		colorImage = cv::Mat(colorFrame.getHeight(), colorFrame.getWidth(),
		                     CV_8UC3, (unsigned char*)colorFrame.getData());

		// Convert OpenNI's RGB order to OpenCV's BGR order
		cv::cvtColor(colorImage, colorImage, CV_RGB2BGR);
	}
	// Xtion IR stream
	else if (colorFrame.getVideoMode().getPixelFormat() == openni::PIXEL_FORMAT_GRAY16) {
		// The Xtion's IR format is 16-bit grayscale, but in practice it seems
		// to use only about 255 levels, so it has to be converted down to
		// CV_8U to be visible.
		colorImage = cv::Mat(colorFrame.getHeight(), colorFrame.getWidth(),
		                     CV_16UC1, (unsigned short*)colorFrame.getData());
		colorImage.convertTo(colorImage, CV_8U);
	}
	// Kinect for Windows IR stream
	else {
		// The Kinect's IR format via OpenNI is 8-bit grayscale
		// (the Kinect SDK itself uses 16-bit grayscale).
		colorImage = cv::Mat(colorFrame.getHeight(), colorFrame.getWidth(),
		                     CV_8UC1, (unsigned char*)colorFrame.getData());
	}

	return colorImage;
}
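// Both showColorStream and getColorImage use the legacy CV_RGB2BGR constant.
// In OpenCV 3/4 that macro lives in the C-compatibility headers
// (opencv2/imgproc/types_c.h); with a modern OpenCV the equivalent is
// cv::COLOR_RGB2BGR.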
void IrStream::setPixels(openni::VideoFrameRef frame)
{
	Stream::setPixels(frame);

	openni::VideoMode m = frame.getVideoMode();
	int w = m.getResolutionX();
	int h = m.getResolutionY();
	int num_pixels = w * h;

	pix.allocate(w, h, 1);

	if (m.getPixelFormat() == openni::PIXEL_FORMAT_GRAY8)
	{
		const unsigned char *src = (const unsigned char*)frame.getData();
		unsigned char *dst = pix.getBackBuffer().getPixels();
		for (int i = 0; i < num_pixels; i++)
		{
			dst[0] = src[0];
			src++;
			dst++;
		}
	}
	else if (m.getPixelFormat() == openni::PIXEL_FORMAT_GRAY16)
	{
		// Drop the two low bits: the IR data is effectively 10-bit, so >> 2
		// maps it into the 8-bit destination.
		const unsigned short *src = (const unsigned short*)frame.getData();
		unsigned char *dst = pix.getBackBuffer().getPixels();
		for (int i = 0; i < num_pixels; i++)
		{
			dst[0] = src[0] >> 2;
			src++;
			dst++;
		}
	}

	pix.swap();
}
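// The fixed ">> 2" above assumes the sensor delivers effectively 10-bit IR
// data. When the actual range is unknown, normalizing by the frame's maximum
// is more robust; a sketch (normalizeIr16To8 is an assumed helper, not part
// of the original class):
#include <algorithm>
#include <cstdint>

static void normalizeIr16To8(const uint16_t* src, uint8_t* dst, int numPixels)
{
	uint16_t maxVal = *std::max_element(src, src + numPixels);
	if (maxVal == 0)
		maxVal = 1;  // avoid dividing by zero on an all-black frame

	for (int i = 0; i < numPixels; i++)
		dst[i] = (uint8_t)((src[i] * 255) / maxVal);
}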
void SampleViewer::Display()
{
	nite::Status rc = m_pHandTracker->readFrame(&handFrame);
	if (rc != nite::STATUS_OK)
	{
		printf("GetNextData failed\n");
		return;
	}

	depthFrame = handFrame.getDepthFrame();

	if (m_pTexMap == NULL)
	{
		// Texture map init
		m_nTexMapX = MIN_CHUNKS_SIZE(depthFrame.getVideoMode().getResolutionX(), TEXTURE_SIZE);
		m_nTexMapY = MIN_CHUNKS_SIZE(depthFrame.getVideoMode().getResolutionY(), TEXTURE_SIZE);
		m_pTexMap = new openni::RGB888Pixel[m_nTexMapX * m_nTexMapY];
	}

	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
	glEnable(GL_DEPTH_TEST);

	glMatrixMode(GL_PROJECTION);
	glPushMatrix();
	glLoadIdentity();
	glOrtho(0, GL_WIN_SIZE_X, GL_WIN_SIZE_Y, 0, -10000.0, 10000.0);

	if (depthFrame.isValid())
	{
		calculateHistogram(m_pDepthHist, MAX_DEPTH, depthFrame);
	}

	memset(m_pTexMap, 0, m_nTexMapX * m_nTexMapY * sizeof(openni::RGB888Pixel));

	float factor[3] = {1, 1, 1};

	// Check if we need to draw the depth frame to the texture
	float av_x = 0;
	float av_y = 0;
	int counter = 0;

	for (int i = 0; i <= 7; i++)
		note_on[i] = false;

	if (depthFrame.isValid() && g_drawDepth)
	{
		const openni::DepthPixel* pDepthRow = (const openni::DepthPixel*)depthFrame.getData();
		const openni::DepthPixel* pDepthRow1 = pDepthRow;
		openni::RGB888Pixel* pTexRow = m_pTexMap + depthFrame.getCropOriginY() * m_nTexMapX;
		int rowSize = depthFrame.getStrideInBytes() / sizeof(openni::DepthPixel);

		// First pass: draw the point cloud and accumulate the centroid of all
		// points closer than 800 mm (the "hands" region).
		glPointSize(2);
		glBegin(GL_POINTS);
		for (int y = 0; y < depthFrame.getHeight(); ++y)
		{
			const openni::DepthPixel* pDepth = pDepthRow;
			openni::RGB888Pixel* pTex = pTexRow + depthFrame.getCropOriginX();

			for (int x = 0; x < depthFrame.getWidth(); ++x, ++pDepth, ++pTex)
			{
				if (*pDepth != 0)
				{
					factor[0] = Colors[colorCount][0];
					factor[1] = Colors[colorCount][1];
					factor[2] = Colors[colorCount][2];

					int nHistValue = m_pDepthHist[*pDepth];
					pTex->r = nHistValue * factor[0];
					pTex->g = nHistValue * factor[1];
					pTex->b = nHistValue * factor[2];

					factor[0] = factor[1] = factor[2] = 1;

					if (*pDepth <= 800)
					{
						glColor3f(float(*pDepth) / 2000, float(*pDepth) / 2000, float(*pDepth) / 2000);
						av_x += x;
						av_y += y;
						counter++;
					}
					else
					{
						glColor3f(float(*pDepth) / 2000, float(*pDepth) / 2000, float(*pDepth) / 2000);
					}
					glVertex3f(2 * x, 2 * y, -*pDepth);
				}
			}

			pDepthRow += rowSize;
			pTexRow += m_nTexMapX;
		}
		glEnd();

		// Guard against an empty near region before averaging
		if (counter > 0)
		{
			av_x /= counter;
			av_y /= counter;
		}

		float R_x = 0, R_y = 0;
		float L_x = 0, L_y = 0;
		int counter_R = 0, counter_L = 0;

		// Second pass: split the near points into right and left of the
		// overall centroid and average each side.
		for (int y = 0; y < depthFrame.getHeight(); ++y)
		{
			const openni::DepthPixel* pDepth = pDepthRow1;

			for (int x = 0; x < depthFrame.getWidth(); ++x, ++pDepth)
			{
				if (*pDepth != 0 && *pDepth <= 800)
				{
					if (x > av_x) { counter_R++; R_x += x; R_y += y; }
					if (x < av_x) { counter_L++; L_x += x; L_y += y; }
				}
			}
			pDepthRow1 += rowSize;
		}

		if (counter_R > 0) { R_x /= counter_R; R_y /= counter_R; }
		if (counter_L > 0) { L_x /= counter_L; L_y /= counter_L; }

		// Draw the two hand centroids
		glPointSize(30);
		glBegin(GL_POINTS);
		glColor3f(1, 0, 0);
		glVertex3f(R_x * 2, R_y * 2, 800);
		glColor3f(1, 1, 0);
		glVertex3f(L_x * 2, L_y * 2, 800);
		glEnd();

		// Map the right-hand centroid onto the 4x2 drum-pad grid
		if (R_x >= 75 && R_x <= 175) {
			if (R_y <= 150)      note_on[0] = true;
			else if (R_y >= 350) note_on[1] = true;
		}
		if (R_x >= 175 && R_x <= 300) {
			if (R_y <= 150)      note_on[2] = true;
			else if (R_y >= 350) note_on[3] = true;
		}
		if (R_x >= 300 && R_x <= 425) {
			if (R_y <= 150)      note_on[4] = true;
			else if (R_y >= 350) note_on[5] = true;
		}
		if (R_x >= 425 && R_x <= 550) {
			if (R_y <= 150)      note_on[6] = true;
			else if (R_y >= 350) note_on[7] = true;
		}

		// Same mapping for the left-hand centroid
		if (L_x >= 75 && L_x <= 175) {
			if (L_y <= 150)      note_on[0] = true;
			else if (L_y >= 350) note_on[1] = true;
		}
		if (L_x >= 175 && L_x <= 300) {
			if (L_y <= 150)      note_on[2] = true;
			else if (L_y >= 350) note_on[3] = true;
		}
		if (L_x >= 300 && L_x <= 425) {
			if (L_y <= 150)      note_on[4] = true;
			else if (L_y >= 350) note_on[5] = true;
		}
		if (L_x >= 425 && L_x <= 550) {
			if (L_y <= 150)      note_on[6] = true;
			else if (L_y >= 350) note_on[7] = true;
		}
	}

	playdrum();

	for (int i = 0; i <= 7; i++)
		last_note[i] = note_on[i];

	// Draw the background bitmap as a translucent textured quad
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
	glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, BMPwidth1, BMPheight1, 0, GL_RGB, GL_UNSIGNED_BYTE, BMPimage1);

	glEnable(GL_BLEND);
	glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
	glColor4f(1, 1, 1, 0.5);
	glEnable(GL_TEXTURE_2D);

	glBegin(GL_QUADS);
	g_nXRes = depthFrame.getVideoMode().getResolutionX();
	g_nYRes = depthFrame.getVideoMode().getResolutionY();
	// upper left
	glTexCoord2f(0, 1);
	glVertex3f(0, 0, -800);
	// upper right
	glTexCoord2f(1, 1);
	glVertex3f(1240, 0, -800);
	// bottom right
	glTexCoord2f(1, 0);
	glVertex3f(1240, 960, -800);
	// bottom left
	glTexCoord2f(0, 0);
	glVertex3f(0, 960, -800);
	glEnd();

	glDisable(GL_TEXTURE_2D);
	glDisable(GL_BLEND);

	// Chord selection guides
	glBegin(GL_LINES);
	glColor3f(1, 0, 0);
	glVertex3f(150, 300, 800);
	glVertex3f(1100, 300, 800);
	glVertex3f(150, 700, 800);
	glVertex3f(1100, 700, 800);
	glEnd();

	glPointSize(30);
	glBegin(GL_POINTS);
	glColor3f(1, 1, 0);
	glVertex3f(150, 300, 800);
	glVertex3f(350, 300, 800);
	glVertex3f(600, 300, 800);
	glVertex3f(850, 300, 800);
	glVertex3f(1100, 300, 800);
	glVertex3f(150, 700, 800);
	glVertex3f(350, 700, 800);
	glVertex3f(600, 700, 800);
	glVertex3f(850, 700, 800);
	glVertex3f(1100, 700, 800);
	glEnd();

	// Start tracking a hand whenever a gesture completes
	const nite::Array<nite::GestureData>& gestures = handFrame.getGestures();
	for (int i = 0; i < gestures.getSize(); ++i)
	{
		if (gestures[i].isComplete())
		{
			const nite::Point3f& position = gestures[i].getCurrentPosition();
			printf("Gesture %d at (%f,%f,%f)\n", gestures[i].getType(),
			       position.x, position.y, position.z);

			nite::HandId newId;
			m_pHandTracker->startHandTracking(gestures[i].getCurrentPosition(), &newId);
		}
	}

	const nite::Array<nite::HandData>& hands = handFrame.getHands();
	for (int i = 0; i < hands.getSize(); ++i)
	{
		const nite::HandData& user = hands[i];

		if (!user.isTracking())
		{
			printf("Lost hand %d\n", user.getId());
			nite::HandId id = user.getId();

			// Free the history buffer of a lost hand
			HistoryBuffer<20>* pHistory = g_histories[id];
			g_histories.erase(g_histories.find(id));
			delete pHistory;
		}
		else
		{
			if (user.isNew())
			{
				printf("Found hand %d\n", user.getId());
				g_histories[user.getId()] = new HistoryBuffer<20>;
			}

			// Add to history
			HistoryBuffer<20>* pHistory = g_histories[user.getId()];
			pHistory->AddPoint(user.getPosition());

			// Draw history
			DrawHistory(m_pHandTracker, user.getId(), pHistory);
		}
	}

	if (g_drawFrameId)
	{
		DrawFrameId(handFrame.getFrameIndex());
	}

	// Swap the OpenGL display buffers
	glutSwapBuffers();
}
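// Display() above leans on calculateHistogram to turn raw depth into display
// intensities. The helper shipped with the OpenNI samples builds a cumulative
// histogram and inverts it so nearer (lower) depth values render brighter; a
// sketch along those lines, assuming m_pDepthHist is a float[MAX_DEPTH]:
#include <OpenNI.h>
#include <cstring>

void calculateHistogram(float* pHistogram, int histogramSize,
                        const openni::VideoFrameRef& frame)
{
	const openni::DepthPixel* pDepth = (const openni::DepthPixel*)frame.getData();
	memset(pHistogram, 0, histogramSize * sizeof(float));

	// Stride may exceed width * sizeof(DepthPixel); skip the padding per row
	int restOfRow = frame.getStrideInBytes() / sizeof(openni::DepthPixel) - frame.getWidth();
	unsigned int nPoints = 0;

	for (int y = 0; y < frame.getHeight(); ++y)
	{
		for (int x = 0; x < frame.getWidth(); ++x, ++pDepth)
		{
			if (*pDepth != 0 && *pDepth < histogramSize)
			{
				pHistogram[*pDepth]++;
				nPoints++;
			}
		}
		pDepth += restOfRow;
	}

	// Accumulate, then invert: rare near values end up bright
	for (int i = 1; i < histogramSize; i++)
		pHistogram[i] += pHistogram[i - 1];

	if (nPoints > 0)
	{
		for (int i = 1; i < histogramSize; i++)
			pHistogram[i] = 256.0f * (1.0f - pHistogram[i] / nPoints);
	}
}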