void initializeHandTracking()
{
    // Create the hand analyzer
    handAnalyzer = senseManager->QueryHand();
    if ( handAnalyzer == 0 ) {
        throw std::runtime_error( "Failed to get the hand analyzer" );
    }

    // Create the hand data output
    handData = handAnalyzer->CreateOutput();
    if ( handData == 0 ) {
        throw std::runtime_error( "Failed to create the hand data output" );
    }

    // If this is a RealSense (IVCAM) camera, tune its device properties
    PXCCapture::DeviceInfo dinfo;
    senseManager->QueryCaptureManager()->QueryDevice()->QueryDeviceInfo( &dinfo );
    if ( dinfo.model == PXCCapture::DEVICE_MODEL_IVCAM ) {
        PXCCapture::Device *device = senseManager->QueryCaptureManager()->QueryDevice();
        device->SetDepthConfidenceThreshold( 1 );
        //device->SetMirrorMode( PXCCapture::Device::MIRROR_MODE_DISABLED );
        device->SetIVCAMFilterOption( 6 );
    }

    // Hand module configuration
    PXCHandConfiguration* config = handAnalyzer->CreateActiveConfiguration();
    //config->EnableNormalizedJoints( showNormalizedSkeleton );
    //config->SetTrackingMode( PXCHandData::TRACKING_MODE_EXTREMITIES );
    //config->EnableAllAlerts();
    config->EnableSegmentationImage( true );
    config->ApplyChanges();
    config->Update();
    config->Release();  // was leaked in the original; not needed once applied
}
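// A minimal sketch (not from the original source) of the per-frame loop that
// consumes the handData created above. It assumes the same senseManager and
// handData members as the snippet and that EnableHand() was called before
// Init(); the function name is illustrative.
void updateHandFrame()
{
    if ( senseManager->AcquireFrame( true ) < PXC_STATUS_NO_ERROR ) {
        return;
    }

    handData->Update();  // refresh the tracking output for this frame

    auto numOfHands = handData->QueryNumberOfHands();
    for ( int i = 0; i < numOfHands; i++ ) {
        PXCHandData::IHand* hand = 0;
        if ( handData->QueryHandData( PXCHandData::ACCESS_ORDER_BY_ID, i, hand )
                == PXC_STATUS_NO_ERROR ) {
            // query joints, openness, etc. from `hand` here
        }
    }

    senseManager->ReleaseFrame();
}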
int SensorRealSense::initialize()
{
    std::cout << "SensorRealSense::initialize()" << std::endl;

    sense_manager = PXCSenseManager::CreateInstance();
    if (!sense_manager) {
        wprintf_s(L"Unable to create the PXCSenseManager\n");
        return false;
    }

    sense_manager->EnableStream(PXCCapture::STREAM_TYPE_COLOR, D_width/2, D_height/2, 60);
    sense_manager->EnableStream(PXCCapture::STREAM_TYPE_DEPTH, D_width, D_height, 60);
    if (sense_manager->Init() < PXC_STATUS_NO_ERROR) {  // was unchecked in the original
        wprintf_s(L"PXCSenseManager::Init failed\n");
        return false;
    }

    PXCSession *session = PXCSession::CreateInstance();
    PXCSession::ImplDesc desc, desc1;
    memset(&desc, 0, sizeof(desc));
    desc.group = PXCSession::IMPL_GROUP_SENSOR;
    desc.subgroup = PXCSession::IMPL_SUBGROUP_VIDEO_CAPTURE;
    if (session->QueryImpl(&desc, 0, &desc1) < PXC_STATUS_NO_ERROR)
        return false;

    PXCCapture *capture;
    pxcStatus status = session->CreateImpl<PXCCapture>(&desc1, &capture);
    if (status != PXC_STATUS_NO_ERROR) {
        QMessageBox::critical(NULL, "FATAL ERROR",
            "Intel RealSense device not plugged?\n(CreateImpl<PXCCapture> failed)");
        exit(0);
    }

    PXCCapture::Device* device = capture->CreateDevice(0);
    projection = device->CreateProjection();

    this->initialized = true;
    return true;
}
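// A minimal sketch (assumed usage, not from the original source) of how the
// `projection` member created in initialize() is typically used: mapping a
// depth pixel, with its depth value in the z component, to color-image
// coordinates. The helper name mapDepthPixelToColor is hypothetical.
void SensorRealSense::mapDepthPixelToColor(int x, int y, unsigned short depth)
{
    PXCPoint3DF32 dpos = { (pxcF32)x, (pxcF32)y, (pxcF32)depth };
    PXCPointF32 cpos = {};
    // MapDepthToColor operates on arrays of points; here npoints == 1
    projection->MapDepthToColor(1, &dpos, &cpos);
    std::cout << "depth (" << x << "," << y << ") -> color ("
              << cpos.x << "," << cpos.y << ")" << std::endl;
}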
bool GesturePipeline::init()
{
    _pxcSenseManager = PXCSenseManager::CreateInstance();

    /* Set Mode & Source */
    pxcCHAR *deviceName = retrieveDeviceName();
    _pxcSenseManager->QueryCaptureManager()->FilterByDeviceInfo(deviceName, 0, 0);

    pxcStatus status = _pxcSenseManager->EnableHand();
    _pxcHandModule = _pxcSenseManager->QueryHand();
    if (_pxcHandModule == NULL || status != pxcStatus::PXC_STATUS_NO_ERROR) {
        cocos2d::log("Failed to pair the gesture module with I/O");
        return false;
    }

    /* Init */
    if (_pxcSenseManager->Init() >= PXC_STATUS_NO_ERROR) {
        _pxcHandData = _pxcHandModule->CreateOutput();
        initRules(_pxcHandData);

        // If the device is an IVCAM, set the following properties
        PXCCapture::Device *device = _pxcSenseManager->QueryCaptureManager()->QueryDevice();
        PXCCapture::DeviceInfo dinfo;
        device->QueryDeviceInfo(&dinfo);
        if (dinfo.model == PXCCapture::DEVICE_MODEL_IVCAM) {
            device->SetDepthConfidenceThreshold(1);
            device->SetMirrorMode(PXCCapture::Device::MIRROR_MODE_DISABLED);
            device->SetIVCAMFilterOption(6);
        }

        // Hand Module Configuration
        PXCHandConfiguration *config = _pxcHandModule->CreateActiveConfiguration();
        config->SetTrackingMode(PXCHandData::TrackingModeType::TRACKING_MODE_FULL_HAND);
        config->EnableSegmentationImage(true);
        config->ApplyChanges();
        config->Update();
        config->Release();
        return true;
    }
    else {
        cocos2d::log("Init Failed");
        return false;
    }
}
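// A minimal sketch (method name assumed, not from the original source) of
// polling fired gestures each frame from the _pxcHandData output created in
// init(). Gestures must have been enabled on the configuration beforehand.
void GesturePipeline::pollGestures()
{
    if (_pxcSenseManager->AcquireFrame(true) < PXC_STATUS_NO_ERROR)
        return;

    _pxcHandData->Update();

    PXCHandData::GestureData gestureData;
    int fired = _pxcHandData->QueryFiredGesturesNumber();
    for (int i = 0; i < fired; i++) {
        if (_pxcHandData->QueryFiredGestureData(i, gestureData) == PXC_STATUS_NO_ERROR) {
            cocos2d::log("Gesture fired: %ls", gestureData.name);
        }
    }

    _pxcSenseManager->ReleaseFrame();
}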
PXCCapture::Device* real_sense_info(PXCCaptureManager *pCaptureManager)
{
    // Info
    PXCCapture::Device *device = pCaptureManager->QueryDevice();
    PXCCapture::DeviceInfo device_info = {};
    device->QueryDeviceInfo(&device_info);
    wprintf(L"%s", device_info.name);  // never pass device data as a format string
    cout << endl;
    cout << "Firmware: " << device_info.firmware[0] << "." << device_info.firmware[1]
         << "." << device_info.firmware[2] << "." << device_info.firmware[3] << endl;

    PXCPointF32 fov = device->QueryDepthFieldOfView();
    cout << "Depth Horizontal Field Of View: " << fov.x << endl;
    cout << "Depth Vertical Field Of View: " << fov.y << endl;

    PXCSizeI32 csize = pCaptureManager->QueryImageSize(PXCCapture::STREAM_TYPE_COLOR);
    cout << "Color Resolution: " << csize.width << " * " << csize.height << endl;
    PXCSizeI32 dsize = pCaptureManager->QueryImageSize(PXCCapture::STREAM_TYPE_DEPTH);
    cout << "Depth Resolution: " << dsize.width << " * " << dsize.height << endl;

    // Camera calibration
    cout << "Calibrating" << endl;
    device->SetDepthConfidenceThreshold(6);
    device->SetIVCAMFilterOption(5);
    device->SetIVCAMLaserPower(16);
    device->SetIVCAMMotionRangeTradeOff(16);
    device->SetIVCAMAccuracy(PXCCapture::Device::IVCAM_ACCURACY_MEDIAN);
    cout << "Depth Setting - OK - Calibrated" << endl;

    return device;
}
void Camera::getFrame(vctDynamicVector<vct3> &points)
{
    sm = PXCSenseManager::CreateInstance();
    sm->EnableStream(PXCCapture::STREAM_TYPE_DEPTH, 640, 480, 30);

    pxcStatus sts = sm->Init();
    if (sts < PXC_STATUS_NO_ERROR) {
        std::cout << "PXCSenseManager::Init failed" << std::endl;
        return;
    }

    PXCCapture::Device *device = sm->QueryCaptureManager()->QueryDevice();
    device->ResetProperties(PXCCapture::STREAM_TYPE_ANY);
    std::cout << device->SetDepthUnit(1000) << std::endl;  // depth unit in micrometers per LSB
    PXCProjection *projection = device->CreateProjection();

    pxcStatus sts2 = sm->AcquireFrame(false);
    if (sts2 >= PXC_STATUS_NO_ERROR) {
        PXCCapture::Sample *sample = sm->QuerySample();
        PXCImage *image = (*sample)[streamType];

        PXCImage::ImageData data;
        image->AcquireAccess(PXCImage::ACCESS_READ, &data);
        PXCImage::ImageInfo dinfo = sample->depth->QueryInfo();

        // Project every depth pixel to a 3D vertex and keep the valid ones
        points.SetSize(dinfo.width * dinfo.height);
        PXCPoint3DF32 *vertices = new PXCPoint3DF32[dinfo.width * dinfo.height];
        projection->QueryVertices(image, vertices);

        int c = 0;
        for (unsigned int i = 0; i < points.size(); i++) {
            PXCPoint3DF32 point = vertices[i];
            if (point.z != 0) {  // z == 0 marks an invalid depth reading
                points[c++] = vct3(point.x, point.y, point.z);
            }
        }
        points.resize(c);

        delete[] vertices;  // was leaked in the original
        image->ReleaseAccess(&data);
        sm->ReleaseFrame();  // was missing in the original
    }

    projection->Release();
    std::cout << sts2 << std::endl;
}
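// A hypothetical caller (not in the original source) for the snippet above,
// assuming Camera is default-constructible: grab one frame's point cloud and
// report how many valid 3D points were kept after filtering z == 0.
int main()
{
    Camera camera;
    vctDynamicVector<vct3> cloud;
    camera.getFrame(cloud);
    std::cout << "Captured " << cloud.size() << " valid 3D points" << std::endl;
    return 0;
}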
void initializeHandTracking()
{
    // Create the hand analyzer
    handAnalyzer = senseManager->QueryHand();
    if ( handAnalyzer == 0 ) {
        throw std::runtime_error( "Failed to get the hand analyzer" );
    }

    // Create the hand data output
    handData = handAnalyzer->CreateOutput();
    if ( handData == 0 ) {
        throw std::runtime_error( "Failed to create the hand data output" );
    }

    // If this is a RealSense (IVCAM) camera, tune its device properties
    PXCCapture::DeviceInfo dinfo;
    senseManager->QueryCaptureManager()->QueryDevice()->QueryDeviceInfo( &dinfo );
    if ( dinfo.model == PXCCapture::DEVICE_MODEL_IVCAM ) {
        PXCCapture::Device *device = senseManager->QueryCaptureManager()->QueryDevice();
        device->SetDepthConfidenceThreshold( 1 );
        //device->SetMirrorMode( PXCCapture::Device::MIRROR_MODE_DISABLED );
        device->SetIVCAMFilterOption( 6 );
    }

    // Hand configuration
    handConfig = handAnalyzer->CreateActiveConfiguration();

    // Enumerate the registered gestures
    auto num = handConfig->QueryGesturesTotalNumber();
    for ( int i = 0; i < num; i++ ) {
        pxcCHAR gestureName[PXCHandData::MAX_NAME_SIZE];
        auto sts = handConfig->QueryGestureNameByIndex( i, PXCHandData::MAX_NAME_SIZE, gestureName );
        if ( sts == PXC_STATUS_NO_ERROR ) {
            std::wcout << std::hex << i << " " << gestureName << std::endl;
        }
    }

    handConfig->ApplyChanges();
    handConfig->Update();
}
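// A minimal sketch (assumed usage, not from the original source): once the
// registered gesture names have been enumerated as above, individual gestures
// can be enabled on the same handConfig; the gesture names shown are standard
// hand-module gestures, and the helper name enableGestures is illustrative.
void enableGestures()
{
    handConfig->EnableGesture( L"thumb_up", false );  // fire once per occurrence
    handConfig->EnableGesture( L"wave", false );
    //handConfig->EnableAllGestures( false );         // or enable everything
    handConfig->ApplyChanges();
}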
void RealsensePropGetListener::getValue(imaqkit::IPropInfo *propertyInfo, void *value)
{
    const char *propname = propertyInfo->getPropertyName();

    PXCSenseManager *psm = PXCSenseManager::CreateInstance();
    psm->EnableStream(PXCCapture::STREAM_TYPE_COLOR, 640, 480);
    psm->Init();

    if (strcmp(propname, "color_brightness") == 0) {
        PXCCapture::Device *device = psm->QueryCaptureManager()->QueryDevice();
        pxcI32 brightness = device->QueryColorBrightness();
        *(reinterpret_cast<int*>(value)) = brightness;
    }
    else {
        assert(false && "Unhandled property. Need to add this new property.");
    }

    psm->Release();
}
void mexFunction(int nlhs, mxArray *plhs[], int nrhs, const mxArray *prhs[])
{
    UInt64 *MXadress;
    if (nrhs == 0) {
        printf("Close failed: Give Pointer to intel camera as input\n");
        mexErrMsgTxt("Intel camera error");
    }
    MXadress = (UInt64*)mxGetData(prhs[0]);
    if (MXadress[0] == 0) { return; }

    UtilPipeline *utilPipelineP = (UtilPipeline*)MXadress[0];
    UtilPipeline &utilPipeline = utilPipelineP[0];

    PXCImage *rgbImage = utilPipeline.QueryImage(PXCImage::IMAGE_TYPE_COLOR);
    PXCImage::ImageData rgbImageData;
    rgbImage->AcquireAccess(PXCImage::ACCESS_READ, &rgbImageData);
    if (rgbImageData.type != PXCImage::SURFACE_TYPE_SYSTEM_MEMORY) {
        mexErrMsgTxt("SURFACE_TYPE_SYSTEM_MEMORY error");
    }

    PXCImage::ImageInfo rgbInfo;
    rgbImage->QueryInfo(&rgbInfo);
    printf("RGB Image : Width %d, Height %d \r\n", rgbInfo.width, rgbInfo.height);

    // Copy the interleaved RGB data into a 3 x width x height MATLAB array
    mwSize dimsRGB[3];
    dimsRGB[0] = 3;
    dimsRGB[1] = rgbInfo.width;
    dimsRGB[2] = rgbInfo.height;
    plhs[0] = mxCreateNumericArray(3, dimsRGB, mxUINT8_CLASS, mxREAL);
    unsigned char *Iout = (unsigned char*)mxGetData(plhs[0]);
    memcpy(Iout, rgbImageData.planes[0], dimsRGB[0]*dimsRGB[1]*dimsRGB[2]);
    rgbImage->ReleaseAccess(&rgbImageData);

    if (nlhs > 1) {
        UtilCapture *capture = utilPipeline.QueryCapture();
        if (!capture) { printf("No valid capture object\n"); return; }
        PXCCapture::Device *device = capture->QueryDevice();
        if (!device) { printf("No valid device object\n"); return; }

        // Get camera projection data
        PXCSession *session = utilPipeline.QuerySession();
        pxcUID pid;
        device->QueryPropertyAsUID(PXCCapture::Device::PROPERTY_PROJECTION_SERIALIZABLE, &pid);
        PXCSmartPtr<PXCProjection> projection;
        PXCMetadata *metadata = session->DynamicCast<PXCMetadata>();
        metadata->CreateSerializable<PXCProjection>(pid, &projection);
        if (!projection) { printf("No valid projection data\n"); return; }

        // Map every color pixel to its depth-image coordinate
        pxcU32 npoints = (pxcU32)dimsRGB[1] * (pxcU32)dimsRGB[2];
        PXCPointF32 *posc = new PXCPointF32[npoints];
        PXCPointF32 *posd = new PXCPointF32[npoints];
        int i = 0;
        for (int y = 0; y < dimsRGB[2]; y++) {
            for (int x = 0; x < dimsRGB[1]; x++) {
                posc[i].x = (pxcF32)x;
                posc[i].y = (pxcF32)y;
                i++;
            }
        }
        projection->MapColorCoordinatesToDepth(npoints, posc, posd);

        mwSize dimsM[3];
        dimsM[0] = 2;
        dimsM[1] = dimsRGB[1];
        dimsM[2] = dimsRGB[2];
        plhs[1] = mxCreateNumericArray(3, dimsM, mxSINGLE_CLASS, mxREAL);
        float *Mout = (float*)mxGetData(plhs[1]);
        memcpy(Mout, posd, dimsM[0]*dimsM[1]*dimsM[2]*sizeof(float));

        delete[] posc;  // was delete(posc): arrays need delete[]
        delete[] posd;
    }
}
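// Hypothetical MATLAB-side usage of the MEX entry point above (assuming it is
// compiled as, say, `realsense_rgb`; the name and calling convention are
// assumptions, kept here only as a comment for reference):
//
//   [rgb, map] = realsense_rgb(cameraPtr);  % 3 x W x H uint8, 2 x W x H single
//   img = permute(rgb, [3 2 1]);            % reorder to H x W x 3 for display
//   imshow(img);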
RealsenseEngine::RealsenseEngine(const char *calibFilename) //, Vector2i requested_imageSize_rgb, Vector2i requested_imageSize_d)
    : ImageSourceEngine(calibFilename)
{
    data = new PrivateData();
#ifndef SDK20
    data->pp = PXCSenseManager::CreateInstance();

    /* Sets file recording or playback */
    data->cm = data->pp->QueryCaptureManager();

#ifndef SDK2016R3
    data->pp->EnableBlob(0);
    PXCBlobModule *blobModule = data->pp->QueryBlob();
    data->blobData = blobModule->CreateOutput();
    data->blobConfiguration = blobModule->CreateActiveConfiguration();

    data->pp->EnableStream(PXCCapture::StreamType::STREAM_TYPE_COLOR, 640, 480, 30);

    // Enable blob color mapping
    data->blobConfiguration->EnableColorMapping(true);
    data->blobConfiguration->EnableContourExtraction(true);
    data->blobConfiguration->EnableSegmentationImage(true);
    data->blobConfiguration->ApplyChanges();
#endif

    data->pp->EnableStream(PXCCapture::StreamType::STREAM_TYPE_DEPTH, 640, 480, 30);

    /* Initializes the pipeline */
    data->sts = data->pp->Init();
    if (data->sts < PXC_STATUS_NO_ERROR) {
        wprintf_s(L"Failed to locate any video stream(s)\n");
        data->pp->Release();
        return;
    }

#ifndef SDK2016R3
    data->blobConfiguration->Update();
#endif

    /* Reset all properties */
    PXCCapture::Device *device = data->pp->QueryCaptureManager()->QueryDevice();
    device->ResetProperties(PXCCapture::STREAM_TYPE_ANY);

    PXCProjection *projection = device->CreateProjection();

    /* Get a calibration instance */
    PXCCalibration *calib = projection->QueryInstance<PXCCalibration>();
    PXCCalibration::StreamCalibration calibration;
    PXCCalibration::StreamTransform transformation;

    pxcStatus ret = calib->QueryStreamProjectionParameters(PXCCapture::STREAM_TYPE_COLOR,
                                                           &calibration, &transformation);
    PXCSizeI32 s = data->cm->QueryImageSize(PXCCapture::STREAM_TYPE_COLOR);
    ImageSourceEngine::calib.intrinsics_rgb.SetFrom(
        calibration.focalLength.x, calibration.focalLength.y,
        calibration.principalPoint.x, calibration.principalPoint.y,
        s.width, s.height);

    Matrix4f trans;
    trans.setIdentity();
    trans.m00 = transformation.rotation[0][0]; trans.m01 = transformation.rotation[0][1]; trans.m02 = transformation.rotation[0][2];
    trans.m10 = transformation.rotation[1][0]; trans.m11 = transformation.rotation[1][1]; trans.m12 = transformation.rotation[1][2];
    trans.m20 = transformation.rotation[2][0]; trans.m21 = transformation.rotation[2][1]; trans.m22 = transformation.rotation[2][2];
    trans.m03 = transformation.translation[0];
    trans.m13 = transformation.translation[1];
    trans.m23 = transformation.translation[2];
    ImageSourceEngine::calib.trafo_rgb_to_depth.SetFrom(trans);

    s = data->cm->QueryImageSize(PXCCapture::STREAM_TYPE_DEPTH);
    ret = calib->QueryStreamProjectionParameters(PXCCapture::STREAM_TYPE_DEPTH,
                                                 &calibration, &transformation);
    ImageSourceEngine::calib.intrinsics_d.SetFrom(
        calibration.focalLength.x, calibration.focalLength.y,
        calibration.principalPoint.x, calibration.principalPoint.y,
        s.width, s.height);

    /* Release the interface */
    projection->Release();

    /* Set mirror mode */
    //if (cmdl.m_bMirror) {
    //    device->SetMirrorMode(PXCCapture::Device::MirrorMode::MIRROR_MODE_HORIZONTAL);
    //} else {
    //    device->SetMirrorMode(PXCCapture::Device::MirrorMode::MIRROR_MODE_DISABLED);
    //}
#else
    rs2::config cfg;
    cfg.enable_stream(RS2_STREAM_COLOR, 640, 480, RS2_FORMAT_BGR8, 30);
    cfg.enable_stream(RS2_STREAM_DEPTH, 640, 480, RS2_FORMAT_Z16, 30);
    data->profile = data->pipe.start(cfg);

    //rs2::video_stream_profile color_stream = data->profile.get_stream(RS2_STREAM_COLOR).as<rs2::video_stream_profile>();
    //rs2::video_stream_profile depth_stream = data->profile.get_stream(RS2_STREAM_DEPTH).as<rs2::video_stream_profile>();
    //rs2_extrinsics extri = depth_stream.get_extrinsics_to(color_stream);
    //rs2_intrinsics intri = depth_stream.get_intrinsics();

    // Measured calibration for one sample device (from a debugger session):
    //   depth intrinsics:  ppx 321.967, ppy 246.107, fx 475.193, fy 475.193
    //   color intrinsics:  ppx 304.000, ppy 223.182, fx 615.904, fy 615.904
    //   depth->color rotation:    near-identity (off-diagonal terms < 0.005)
    //   depth->color translation: { 0.0247, 8.39e-05, 0.00402 }

    //data->depth_scale = get_depth_scale(data->profile.get_device());

    // The pipeline could choose a device that does not have a color stream;
    // if there is no color stream, choose to align depth to another stream.
    data->align_to = find_stream_to_align(data->profile.get_streams());

    // Create a rs2::align object. rs2::align allows us to perform alignment of
    // depth frames to other frames; "align_to" is the stream type to which we
    // plan to align depth frames.
    //data->align = &rs2::align(data->align_to);  // NB: address of a temporary; kept disabled
#endif
}
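// A minimal sketch (method name assumed, not from the original source) of the
// SDK 2.0 per-frame path the constructor above sets up: wait for a frameset
// and align depth to the chosen stream with a long-lived rs2::align instance,
// which also avoids the address-of-temporary issue noted in the comment above.
void RealsenseEngine::grabAlignedFrame()
{
    static rs2::align align(data->align_to);  // illustrative; could live in PrivateData

    rs2::frameset frames = data->pipe.wait_for_frames();
    rs2::frameset aligned = align.process(frames);

    rs2::video_frame color = aligned.get_color_frame();
    rs2::depth_frame depth = aligned.get_depth_frame();
    if (color && depth) {
        // copy color.get_data() / depth.get_data() into the engine's buffers here
    }
}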