void main() { // Create the session manager PXCSenseManager *sense_mgr = PXCSenseManager::CreateInstance(); // Select the stream. Note that if the format of the stream is not supported, the Init() will fail sense_mgr->EnableStream(PXCCapture::STREAM_TYPE_DEPTH, 640, 480, 30); sense_mgr->EnableStream(PXCCapture::STREAM_TYPE_COLOR, 640, 480, 30); // Start the camera. If the stream size is wrong, the following will fail pxcStatus status = sense_mgr->Init(); if (PXC_STATUS_NO_ERROR != status) { printf_s("Status is %d\n", status); return; } // Processing the image for (;;) { // This function blocks until a sample is ready if (sense_mgr->AcquireFrame(true)<PXC_STATUS_NO_ERROR) break; // retrieve the sample PXCCapture::Sample *sample = sense_mgr->QuerySample(); // work on the image sample->color PXCImage *image = sample->depth; PXCImage::ImageInfo imageInfo = image->QueryInfo(); // go fetching the next sample sense_mgr->ReleaseFrame(); } // Close down sense_mgr->Release(); }
void updateHandFrame() { handData->Update(); // 画像を初期化 handImage1 = cv::Mat::zeros( DEPTH_HEIGHT, DEPTH_WIDTH, CV_8UC1 ); handImage2 = cv::Mat::zeros( DEPTH_HEIGHT, DEPTH_WIDTH, CV_8UC1 ); // 検出した手の数を取得する auto numOfHands = handData->QueryNumberOfHands(); for ( int i = 0; i < numOfHands; i++ ) { // 手を取得する PXCHandData::IHand* hand; auto sts = handData->QueryHandData( PXCHandData::AccessOrderType::ACCESS_ORDER_BY_ID, i, hand ); if ( sts < PXC_STATUS_NO_ERROR ) { continue; } // 手のマスク画像を取得する PXCImage* image = 0; sts = hand->QuerySegmentationImage( image ); if ( sts < PXC_STATUS_NO_ERROR ) { continue; } // マスク画像を取得する PXCImage::ImageData data; sts = image->AcquireAccess( PXCImage::ACCESS_READ, PXCImage::PIXEL_FORMAT_Y8, &data ); if ( sts < PXC_STATUS_NO_ERROR ){ continue; } // マスク画像のサイズはDepthに依存 // 手は2つまで PXCImage::ImageInfo info = image->QueryInfo(); auto& handImage = (i == 0) ? handImage1 : handImage2; memcpy( handImage.data, data.planes[0], info.height * info.width ); // データを表示させる(時間がかかります) //for ( int i = 0; i < info.height * info.width; i++ ){ // std::cout << (int)data.planes[0][i] << " "; //} image->ReleaseAccess( &data ); } }
//-------------------------------------------------------------- void IntelFaceScanner::updateBitmap(PXCImage *newimage) { if(!newimage) return; PXCImage* image = newimage; PXCImage::ImageInfo info=image->QueryInfo(); PXCImage::ImageData data; if(info.format == PXCImage::PIXEL_FORMAT_RGB32) { if (image->AcquireAccess(PXCImage::ACCESS_READ,PXCImage::PIXEL_FORMAT_RGB32, &data)>=PXC_STATUS_NO_ERROR) { for(int i=0; i < info.height;i++) { memcpy_s((char*)(m_charBuffer+i*data.pitches[0]),info.width*4,data.planes[0]+i*data.pitches[0],info.width*4); } frame.setFromPixels(m_charBuffer, info.width, info.height, OF_IMAGE_COLOR_ALPHA, true); ScanningData fdata(frame); scanningData.send(fdata); image->ReleaseAccess(&data); } } }
void mexFunction( int nlhs, mxArray *plhs[], int nrhs, const mxArray *prhs[] ) { UInt64 *MXadress; if(nrhs==0) { printf("Close failed: Give Pointer to intel camera as input\n"); mexErrMsgTxt("Intel camera error"); } MXadress = (UInt64*)mxGetData(prhs[0]); if(MXadress[0]==0) { return; } UtilPipeline* utilPipelineP = (UtilPipeline*)MXadress[0]; UtilPipeline& utilPipeline = utilPipelineP[0]; PXCImage *depthImage = utilPipeline.QueryImage(PXCImage::IMAGE_TYPE_DEPTH); PXCImage::ImageData depthImageData; depthImage->AcquireAccess(PXCImage::ACCESS_READ,&depthImageData); //if(depthImageData.format != PXCImage::COLOR_FORMAT_DEPTH) //{ // mexErrMsgTxt("COLOR_FORMAT_DEPTH error"); //} if(depthImageData.type != PXCImage::SURFACE_TYPE_SYSTEM_MEMORY) { mexErrMsgTxt("SURFACE_TYPE_SYSTEM_MEMORY error"); } PXCImage::ImageInfo depthInfo; depthImage->QueryInfo(&depthInfo); printf("Depth Image : Width %d, Height %d \r\n",depthInfo.width,depthInfo.height); mwSize dimsDepth[2]; dimsDepth[0]=depthInfo.width; dimsDepth[1]=depthInfo.height; unsigned short *Iout; plhs[0] = mxCreateNumericArray(2, dimsDepth, mxUINT16_CLASS, mxREAL); Iout = (unsigned short*)mxGetData(plhs[0]); memcpy (Iout,depthImageData.planes[0],dimsDepth[0]*dimsDepth[1]*sizeof(unsigned short)); if(nlhs>1) { unsigned short *Cout; plhs[1] = mxCreateNumericArray(2, dimsDepth, mxUINT16_CLASS, mxREAL); Cout = (unsigned short*)mxGetData(plhs[1]); memcpy (Cout,depthImageData.planes[1],dimsDepth[0]*dimsDepth[1]*sizeof(unsigned short)); } if(nlhs>2) { mwSize dimsF[3]; dimsF[0]=2; dimsF[1]=depthInfo.width; dimsF[2]=depthInfo.height; float *Dout; plhs[2] = mxCreateNumericArray(3, dimsF, mxSINGLE_CLASS, mxREAL); Dout = (float*)mxGetData(plhs[2]); memcpy (Dout,depthImageData.planes[2],dimsF[0]*dimsF[1]*dimsF[2]*sizeof(float)); } depthImage->ReleaseAccess(&depthImageData); }
void mexFunction( int nlhs, mxArray *plhs[], int nrhs, const mxArray *prhs[] ) { UInt64 *MXadress; if(nrhs==0) { printf("Close failed: Give Pointer to intel camera as input\n"); mexErrMsgTxt("Intel camera error"); } MXadress = (UInt64*)mxGetData(prhs[0]); if(MXadress[0]==0) { return; } UtilPipeline* utilPipelineP = (UtilPipeline*)MXadress[0]; UtilPipeline& utilPipeline = utilPipelineP[0]; PXCImage *rgbImage = utilPipeline.QueryImage(PXCImage::IMAGE_TYPE_COLOR); PXCImage::ImageData rgbImageData; rgbImage->AcquireAccess(PXCImage::ACCESS_READ,&rgbImageData); //if(depthImageData.format != PXCImage::COLOR_FORMAT_DEPTH) //{ // mexErrMsgTxt("COLOR_FORMAT_DEPTH error"); //} if(rgbImageData.type != PXCImage::SURFACE_TYPE_SYSTEM_MEMORY) { mexErrMsgTxt("SURFACE_TYPE_SYSTEM_MEMORY error"); } PXCImage::ImageInfo rgbInfo; rgbImage->QueryInfo(&rgbInfo); printf("RGB Image : Width %d, Height %d \r\n",rgbInfo.width,rgbInfo.height); mwSize dimsRGB[3]; dimsRGB[0]=3; dimsRGB[1]=rgbInfo.width; dimsRGB[2]=rgbInfo.height; unsigned char *Iout; plhs[0] = mxCreateNumericArray(3, dimsRGB, mxUINT8_CLASS, mxREAL); Iout = (unsigned char*)mxGetData(plhs[0]); memcpy (Iout,rgbImageData.planes[0],dimsRGB[0]*dimsRGB[1]*dimsRGB[2]); rgbImage->ReleaseAccess(&rgbImageData); if(nlhs>1) { UtilCapture *capture = utilPipeline.QueryCapture(); if(!capture) { printf("No valid capture object\n"); return; } PXCCapture::Device *device = capture->QueryDevice(); if(!device) { printf("No valid device object\n"); return; } // Get Camera Projection Data PXCSession *session = utilPipeline.QuerySession(); pxcUID pid; device->QueryPropertyAsUID(PXCCapture::Device::PROPERTY_PROJECTION_SERIALIZABLE,&pid); PXCSmartPtr<PXCProjection> projection; PXCMetadata *metadata = session->DynamicCast<PXCMetadata>(); metadata->CreateSerializable<PXCProjection>( pid, &projection ); if(!projection) { printf("No valid projection data\n"); return; } pxcU32 npoints = (pxcU32) dimsRGB[1]*(pxcU32)dimsRGB[2]; PXCPointF32 *posc = new 
PXCPointF32[npoints]; PXCPointF32 *posd = new PXCPointF32[npoints]; int i=0; for (int y=0;y< dimsRGB[2];y++) { for (int x=0;x< dimsRGB[1];x++) { posc[i].x=(pxcF32)x, posc[i].y=(pxcF32)y; i++; } } projection->MapColorCoordinatesToDepth(npoints,posc,posd); //projection->Release(); mwSize dimsM[3]; dimsM[0]=2; dimsM[1]=dimsRGB[1]; dimsM[2]=dimsRGB[2]; plhs[1] = mxCreateNumericArray(3, dimsM, mxSINGLE_CLASS, mxREAL); float* Mout = (float*)mxGetData(plhs[1]); memcpy (Mout,posd,dimsM[0]*dimsM[1]*dimsM[2]*sizeof(float)); delete(posc); delete(posd); } }
// Camera Processing Thread
// Initialize the RealSense SenseManager and initiate camera processing loop:
// Step 1: Acquire new camera frame
// Step 2: Load shared settings
// Step 3: Perform Core SDK and middleware processing and store results
//         in background RealSenseDataFrame
// Step 4: Swap the background and mid RealSenseDataFrames
void RealSenseImpl::CameraThread()
{
    uint64 currentFrame = 0;

    fgFrame->number = 0;
    midFrame->number = 0;
    bgFrame->number = 0;

    pxcStatus status = senseManager->Init();
    RS_LOG_STATUS(status, "SenseManager Initialized")
    assert(status == PXC_STATUS_NO_ERROR);

    // Face middleware output object is created after Init()
    if (bFaceEnabled) {
        faceData = pFace->CreateOutput();
    }

    while (bCameraThreadRunning == true) {
        // Acquires new camera frame (blocking call)
        status = senseManager->AcquireFrame(true);
        assert(status == PXC_STATUS_NO_ERROR);

        bgFrame->number = ++currentFrame;

        // Performs Core SDK and middleware processing and store results
        // in background RealSenseDataFrame
        if (bCameraStreamingEnabled) {
            PXCCapture::Sample* sample = senseManager->QuerySample();
            // NOTE(review): sample->color / sample->depth are not
            // null-checked here — confirm both streams are always enabled
            // when bCameraStreamingEnabled is set
            CopyColorImageToBuffer(sample->color, bgFrame->colorImage, colorResolution.width, colorResolution.height);
            CopyDepthImageToBuffer(sample->depth, bgFrame->depthImage, depthResolution.width, depthResolution.height);
        }

        if (bScan3DEnabled) {
            // One-shot flags (set elsewhere) toggle scanning in the 3D scan
            // middleware configuration, then reset themselves
            if (bScanStarted) {
                PXC3DScan::Configuration config = p3DScan->QueryConfiguration();
                config.startScan = true;
                p3DScan->SetConfiguration(config);
                bScanStarted = false;
            }

            if (bScanStopped) {
                PXC3DScan::Configuration config = p3DScan->QueryConfiguration();
                config.startScan = false;
                p3DScan->SetConfiguration(config);
                bScanStopped = false;
            }

            // Preview image may be null; it is copied out and released here
            PXCImage* scanImage = p3DScan->AcquirePreviewImage();
            if (scanImage) {
                UpdateScan3DImageSize(scanImage->QueryInfo());
                CopyColorImageToBuffer(scanImage, bgFrame->scanImage, scan3DResolution.width, scan3DResolution.height);
                scanImage->Release();
            }

            // Reconstruction is a one-shot request; flag is consumed here
            if (bReconstructEnabled) {
                status = p3DScan->Reconstruct(scan3DFileFormat, scan3DFilename.GetCharArray().GetData());
                bReconstructEnabled = false;
                bScanCompleted = true;
            }
        }

        if (bFaceEnabled) {
            faceData->Update();
            bgFrame->headCount = faceData->QueryNumberOfDetectedFaces();
            // Only the first detected face's pose is stored in the frame
            if (bgFrame->headCount > 0) {
                PXCFaceData::Face* face = faceData->QueryFaceByIndex(0);
                PXCFaceData::PoseData* poseData = face->QueryPose();
                if (poseData) {
                    PXCFaceData::HeadPosition headPosition = {};
                    poseData->QueryHeadPosition(&headPosition);
                    bgFrame->headPosition = FVector(headPosition.headCenter.x, headPosition.headCenter.y, headPosition.headCenter.z);

                    PXCFaceData::PoseEulerAngles headRotation = {};
                    poseData->QueryPoseAngles(&headRotation);
                    bgFrame->headRotation = FRotator(headRotation.pitch, headRotation.yaw, headRotation.roll);
                }
            }
        }

        senseManager->ReleaseFrame();

        // Swaps background and mid RealSenseDataFrames under the shared
        // mutex so the consumer thread never sees a half-written frame
        std::unique_lock<std::mutex> lockIntermediate(midFrameMutex);
        bgFrame.swap(midFrame);
    }
}
// Camera Processing Thread
// Initialize the RealSense SenseManager and initiate camera processing loop:
// Step 1: Acquire new camera frame
// Step 2: Load shared settings
// Step 3: Perform Core SDK and middleware processing and store results
//         in background RealSenseDataFrame
// Step 4: Swap the background and mid RealSenseDataFrames
void RealSenseImpl::CameraThread()
{
    uint64 currentFrame = 0;

    fgFrame->number = 0;
    midFrame->number = 0;
    bgFrame->number = 0;

    pxcStatus status = senseManager->Init();
    RS_LOG_STATUS(status, "SenseManager Initialized")
    assert(status == PXC_STATUS_NO_ERROR);

    while (bCameraThreadRunning == true) {
        // Acquires new camera frame (blocking call)
        status = senseManager->AcquireFrame(true);
        assert(status == PXC_STATUS_NO_ERROR);

        bgFrame->number = ++currentFrame;

        PXCCapture::Sample* sample = senseManager->QuerySample();

        // Performs Core SDK and middleware processing and store results
        // in background RealSenseDataFrame; each stream is copied only
        // when enabled AND present in this sample
        if (bColorStreamingEnabled) {
            if (sample->color) {
                CopyColorImageToBuffer(sample->color, bgFrame->colorImage, colorResolution.width, colorResolution.height);
            }
        }

        if (bDepthStreamingEnabled) {
            if (sample->depth) {
                CopyDepthImageToBuffer(sample->depth, bgFrame->depthImage, depthResolution.width, depthResolution.height);
            }
        }

        if (bScan3DEnabled) {
            // One-shot flags (set elsewhere) toggle scanning in the 3D scan
            // middleware configuration, then reset themselves
            if (bScanStarted) {
                PXC3DScan::Configuration config = p3DScan->QueryConfiguration();
                config.startScan = true;
                p3DScan->SetConfiguration(config);
                bScanStarted = false;
            }

            if (bScanStopped) {
                PXC3DScan::Configuration config = p3DScan->QueryConfiguration();
                config.startScan = false;
                p3DScan->SetConfiguration(config);
                bScanStopped = false;
            }

            // Preview image may be null; it is copied out and released here
            PXCImage* scanImage = p3DScan->AcquirePreviewImage();
            if (scanImage) {
                UpdateScan3DImageSize(scanImage->QueryInfo());
                CopyColorImageToBuffer(scanImage, bgFrame->scanImage, scan3DResolution.width, scan3DResolution.height);
                scanImage->Release();
            }

            // Reconstruction is a one-shot request; flag is consumed here
            if (bReconstructEnabled) {
                status = p3DScan->Reconstruct(scan3DFileFormat, scan3DFilename.GetCharArray().GetData());
                bReconstructEnabled = false;
                bScanCompleted = true;
            }
        }

        senseManager->ReleaseFrame();

        // Swaps background and mid RealSenseDataFrames under the shared
        // mutex so the consumer thread never sees a half-written frame
        std::unique_lock<std::mutex> lockIntermediate(midFrameMutex);
        bgFrame.swap(midFrame);
    }
}
// Render the depth sample as a point cloud projected into a 1024x1024
// RGB32 image, then blit it into the dialog panel as a GDI DIB.
// Fixes: the original leaked m_outputImage, the depth read access, and the
// write access on its early-return paths.
void Graphics::DrawBitmap(PXCCapture::Sample* sample)
{
    PXCImage *imageDepth = sample->depth;
    assert(imageDepth);
    PXCImage::ImageInfo imageDepthInfo = imageDepth->QueryInfo();

    // Output canvas: fixed 1024x1024 RGB32
    m_outputImageInfo.width = 1024;
    m_outputImageInfo.height = 1024;
    m_outputImageInfo.format = PXCImage::PIXEL_FORMAT_RGB32;
    m_outputImageInfo.reserved = 0;

    m_outputImage = m_session->CreateImage(&m_outputImageInfo);
    assert(m_outputImage);

    PXCImage::ImageData imageDepthData;
    if (imageDepth->AcquireAccess(PXCImage::ACCESS_READ, PXCImage::PIXEL_FORMAT_RGB32, &imageDepthData) < PXC_STATUS_NO_ERROR)
    {
        m_outputImage->Release();   // original leaked the output image here
        return;
    }

    memset(&m_outputImageData, 0, sizeof(m_outputImageData));
    pxcStatus status = m_outputImage->AcquireAccess(PXCImage::ACCESS_WRITE, PXCImage::PIXEL_FORMAT_RGB32, &m_outputImageData);
    if (status < PXC_STATUS_NO_ERROR)
    {
        // Original returned here without releasing the depth access
        // or the output image
        imageDepth->ReleaseAccess(&imageDepthData);
        m_outputImage->Release();
        return;
    }

    // Clear the output canvas
    int stridePixels = m_outputImageData.pitches[0];
    pxcBYTE *pixels = reinterpret_cast<pxcBYTE*>(m_outputImageData.planes[0]);
    memset(pixels, 0, stridePixels * m_outputImageInfo.height);

    // Project the depth map into camera-space vertices
    PXCPoint3DF32* vertices = new PXCPoint3DF32[imageDepthInfo.width * imageDepthInfo.height];
    PXCProjection* projection(m_senseManager->QueryCaptureManager()->QueryDevice()->CreateProjection());
    if (!projection)
    {
        delete[] vertices;
        // Original released only `vertices` here, leaking both image
        // accesses and the output image
        m_outputImage->ReleaseAccess(&m_outputImageData);
        imageDepth->ReleaseAccess(&imageDepthData);
        m_outputImage->Release();
        return;
    }
    projection->QueryVertices(imageDepth, vertices);
    projection->Release();

    int strideVertices = imageDepthInfo.width;

    // Render every valid vertex (z > 0) as a mid-grey, fully opaque pixel
    int numVertices = 0;
    for (int y = 0; y < imageDepthInfo.height; y++)
    {
        const PXCPoint3DF32 *verticesRow = vertices + y * strideVertices;
        for (int x = 0; x < imageDepthInfo.width; x++)
        {
            const PXCPoint3DF32 &v = verticesRow[x];
            if (v.z <= 0.0f)
            {
                continue;   // no depth data at this pixel
            }

            int ix = 0, iy = 0;
            if (ProjectVertex(v, ix, iy))
            {
                pxcBYTE *ptr = m_outputImageData.planes[0];
                ptr += iy * m_outputImageData.pitches[0];
                ptr += ix * 4;
                ptr[0] = pxcBYTE(255.0f * 0.5f);
                ptr[1] = pxcBYTE(255.0f * 0.5f);
                ptr[2] = pxcBYTE(255.0f * 0.5f);
                ptr[3] = pxcBYTE(255.0f);
            }
            numVertices++;
        }
    }
    delete[] vertices;

    // Replace the previous GDI bitmap
    if (m_bitmap)
    {
        DeleteObject(m_bitmap);
        m_bitmap = 0;
    }

    HWND hwndPanel = GetDlgItem(m_window, IDC_PANEL);
    HDC dc = GetDC(hwndPanel);

    BITMAPINFO binfo;
    memset(&binfo, 0, sizeof(binfo));
    // Width is the pitch in pixels so any row padding lines up with the DIB
    binfo.bmiHeader.biWidth = m_outputImageData.pitches[0] / 4;
    binfo.bmiHeader.biHeight = -(int)m_outputImageInfo.height;   // negative height = top-down DIB
    binfo.bmiHeader.biBitCount = 32;
    binfo.bmiHeader.biPlanes = 1;
    binfo.bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
    binfo.bmiHeader.biCompression = BI_RGB;

    Sleep(1);   // NOTE(review): purpose unclear — looks like a timing workaround; confirm before removing

    m_bitmap = CreateDIBitmap(dc, &binfo.bmiHeader, CBM_INIT, m_outputImageData.planes[0], &binfo, DIB_RGB_COLORS);

    ReleaseDC(hwndPanel, dc);

    m_outputImage->ReleaseAccess(&m_outputImageData);
    imageDepth->ReleaseAccess(&imageDepthData);
    m_outputImage->Release();
}
/* *_stream() reads a single frame */ void GestureHandler::_stream() { if (!_pipeline.AcquireFrame(true)) // true - blocking return; // Retrieve QueryImage() as PXCSmartPtr<PXCImage> will crash later on PXCImage* image = _pipeline.QueryImage(PXCImage::IMAGE_TYPE_COLOR); PXCImage::ImageData image_data; pxcStatus sts = image->AcquireAccess(PXCImage::ACCESS_READ, PXCImage::COLOR_FORMAT_RGB24, &image_data); if (sts < PXC_STATUS_NO_ERROR) { std::cout << "!!! Failed AcquireAccess." << std::endl; _pipeline.ReleaseFrame(); return; } PXCImage::ImageInfo image_info; image->QueryInfo(&image_info); if (sts < PXC_STATUS_NO_ERROR) { std::cout << "!!! Failed QueryInfo." << std::endl; _pipeline.ReleaseFrame(); return; } if (image_info.format == PXCImage::COLOR_FORMAT_RGB24) { // BGR layout on little-endian machine unsigned char* bgr_img_data = (unsigned char*)image_data.planes[0]; QImage temp(bgr_img_data, image_info.width, image_info.height, QImage::Format_RGB888); // Convert data from BGR (PXCImage format) to RGB (QPixmap) QPixmap pixmap = QPixmap::fromImage(temp.rgbSwapped()); emit notify_image(pixmap); } else { std::cout << "!!! TODO: support other image formats besides RGB24." << std::endl; _pipeline.ReleaseFrame(); return; } image->ReleaseAccess(&image_data); if (sts < PXC_STATUS_NO_ERROR) { std::cout << "!!! Failed ReleaseAccess." << std::endl; _pipeline.ReleaseFrame(); return; } /* Detect gestures */ PXCGesture* gesture = _pipeline.QueryGesture(); if (gesture) { // get the node data for the whole primary hand // (the one that shows up in the view first) PXCGesture::GeoNode node; sts = gesture->QueryNodeData(0, PXCGesture::GeoNode::LABEL_BODY_HAND_PRIMARY, &node); if (sts == PXC_STATUS_ITEM_UNAVAILABLE) { //std::cout << "!!! 
QueryNodeData: PXC_STATUS_ITEM_UNAVAILABLE" << std::endl; _pipeline.ReleaseFrame(); return; } //std::cout << "* Hand at " << node.positionWorld.x << "x" << node.positionWorld.y << " z = " << node.positionWorld.z << std::endl; PXCGesture::Gesture gest; sts = gesture->QueryGestureData(0, PXCGesture::GeoNode::LABEL_BODY_HAND_PRIMARY, 0, &gest); if (sts == PXC_STATUS_ITEM_UNAVAILABLE) { //std::cout << "!!! QueryGestureData: PXC_STATUS_ITEM_UNAVAILABLE" << std::endl; _pipeline.ReleaseFrame(); return; } if (gest.active) { // how many ms has passed since last gesture detection qint64 elapsed = _elapsed_timer.elapsed(); switch (gest.label) { case PXCGesture::Gesture::LABEL_HAND_WAVE: case PXCGesture::Gesture::LABEL_NAV_SWIPE_LEFT: case PXCGesture::Gesture::LABEL_NAV_SWIPE_RIGHT: case PXCGesture::Gesture::LABEL_NAV_SWIPE_UP: case PXCGesture::Gesture::LABEL_NAV_SWIPE_DOWN: case PXCGesture::Gesture::LABEL_HAND_CIRCLE: default: emit notify_gesture(gest); break; case PXCGesture::Gesture::LABEL_POSE_THUMB_UP: if ( (_last_gesture == PXCGesture::Gesture::LABEL_POSE_THUMB_UP && elapsed > 1000) || (_last_gesture != PXCGesture::Gesture::LABEL_POSE_THUMB_UP) ) emit notify_gesture(gest); break; case PXCGesture::Gesture::LABEL_POSE_THUMB_DOWN: if ( (_last_gesture == PXCGesture::Gesture::LABEL_POSE_THUMB_DOWN && elapsed > 1000) || (_last_gesture != PXCGesture::Gesture::LABEL_POSE_THUMB_DOWN) ) emit notify_gesture(gest); break; case PXCGesture::Gesture::LABEL_POSE_PEACE: if ( (_last_gesture == PXCGesture::Gesture::LABEL_POSE_PEACE && elapsed > 1000) || (_last_gesture != PXCGesture::Gesture::LABEL_POSE_PEACE) ) emit notify_gesture(gest); break; case PXCGesture::Gesture::LABEL_POSE_BIG5: if ( (_last_gesture == PXCGesture::Gesture::LABEL_POSE_BIG5 && elapsed > 1000) || (_last_gesture != PXCGesture::Gesture::LABEL_POSE_BIG5) ) emit notify_gesture(gest); break; } _last_gesture = gest.label; if (elapsed > 1000) _elapsed_timer.restart(); } } _pipeline.ReleaseFrame(); }