int SensorRealSense::initialize() { std::cout << "SensorRealSense::initialize()" << std::endl; sense_manager = PXCSenseManager::CreateInstance(); if (!sense_manager) { wprintf_s(L"Unable to create the PXCSenseManager\n"); return false; } sense_manager->EnableStream(PXCCapture::STREAM_TYPE_COLOR, D_width/2, D_height/2, 60); sense_manager->EnableStream(PXCCapture::STREAM_TYPE_DEPTH, D_width, D_height, 60); sense_manager->Init(); PXCSession *session = PXCSession::CreateInstance(); PXCSession::ImplDesc desc, desc1; memset(&desc, 0, sizeof(desc)); desc.group = PXCSession::IMPL_GROUP_SENSOR; desc.subgroup = PXCSession::IMPL_SUBGROUP_VIDEO_CAPTURE; if (session->QueryImpl(&desc, 0, &desc1) < PXC_STATUS_NO_ERROR) return false; PXCCapture * capture; pxcStatus status = session->CreateImpl<PXCCapture>(&desc1, &capture); if(status != PXC_STATUS_NO_ERROR){ QMessageBox::critical(NULL,"FATAL ERROR", "Intel RealSense device not plugged?\n(CreateImpl<PXCCapture> failed)"); exit(0); } PXCCapture::Device* device; device = capture->CreateDevice(0); projection = device->CreateProjection(); this->initialized = true; return true; }
/// Acquires one depth frame (640x480 @ 30 FPS) and converts it into a set of
/// 3D points, keeping only pixels with a valid (non-zero) depth value.
///
/// @param points  Output vector; resized to the number of valid depth points.
///                Units follow SetDepthUnit(1000), i.e. 1000 micrometers
///                (1 mm) per depth unit.
///
/// NOTE(review): this builds a fresh pipeline on every call, which is
/// expensive; consider initializing `sm` once — confirm against callers.
void Camera::getFrame(vctDynamicVector<vct3> & points) {
    sm = PXCSenseManager::CreateInstance();
    sm->EnableStream(PXCCapture::STREAM_TYPE_DEPTH, 640, 480, 30);
    pxcStatus sts = sm->Init();
    if (sts < PXC_STATUS_NO_ERROR) {
        // NOTE(review): original code only logs and continues here; kept to
        // avoid changing caller-visible behavior.
        std::cout << "DIDN\'T WORK" << std::endl;
    }

    PXCCapture::Device *device = sm->QueryCaptureManager()->QueryDevice();
    device->ResetProperties(PXCCapture::STREAM_TYPE_ANY);
    // 1000 micrometers per depth unit = millimeters.
    std::cout << device->SetDepthUnit(1000) << std::endl;
    PXCProjection *projection = device->CreateProjection();

    pxcStatus sts2 = sm->AcquireFrame(false);
    if (sts2 >= PXC_STATUS_NO_ERROR) {
        PXCCapture::Sample *sample = sm->QuerySample();
        PXCImage *image = (*sample)[streamType];
        PXCImage::ImageData data;
        image->AcquireAccess(PXCImage::ACCESS_READ, &data);
        PXCImage::ImageInfo dinfo = sample->depth->QueryInfo();

        const int numPixels = dinfo.width * dinfo.height;
        points.SetSize(numPixels);

        // Project the whole depth image to camera-space vertices.
        PXCPoint3DF32 *vertices = new PXCPoint3DF32[numPixels];
        projection->QueryVertices(image, vertices);

        // Compact: keep only valid samples (z == 0 marks an invalid pixel).
        int validCount = 0;
        for (int i = 0; i < numPixels; i++) {
            const PXCPoint3DF32 &point = vertices[i];
            if (point.z != 0) {
                points[validCount++] = vct3(point.x, point.y, point.z);
            }
        }
        points.resize(validCount);

        delete[] vertices;      // BUGFIX: vertex buffer was leaked every frame
        image->ReleaseAccess(&data);
        sm->ReleaseFrame();     // BUGFIX: frame was never returned to the pipeline
    }
    projection->Release();
    std::cout << sts2 << std::endl;
}
/// Constructs the RealSense image source and fills in the RGB/depth
/// intrinsics and the rgb-to-depth transform of ImageSourceEngine::calib.
/// Two mutually exclusive build paths:
///   - !SDK20:  legacy RSSDK ("PXC") pipeline; optionally also the blob
///              module when SDK2016R3 is not defined.
///   - SDK20:   librealsense2 pipeline (calibration read-out is still
///              commented out on this path — see the dumped values below).
RealsenseEngine::RealsenseEngine(const char *calibFilename)//, Vector2i requested_imageSize_rgb, Vector2i requested_imageSize_d)
	: ImageSourceEngine(calibFilename)
{
	data = new PrivateData();
#ifndef SDK20
	data->pp = PXCSenseManager::CreateInstance();
	/* Sets file recording or playback */
	data->cm = data->pp->QueryCaptureManager();
#ifndef SDK2016R3
	// Blob tracking: enable the module, grab its output/config handles,
	// and turn on color mapping, contour extraction and segmentation.
	data->pp->EnableBlob(0);
	PXCBlobModule *blobModule = data->pp->QueryBlob();
	data->blobData = blobModule->CreateOutput();
	data->blobConfiguration = blobModule->CreateActiveConfiguration();
	data->pp->EnableStream(PXCCapture::StreamType::STREAM_TYPE_COLOR, 640, 480, 30);
	// Enable blob color mapping
	data->blobConfiguration->EnableColorMapping(true);
	data->blobConfiguration->EnableContourExtraction(true);
	data->blobConfiguration->EnableSegmentationImage(true);
	data->blobConfiguration->ApplyChanges();
#endif
	data->pp->EnableStream(PXCCapture::StreamType::STREAM_TYPE_DEPTH, 640, 480, 30);
	/* Initializes the pipeline */
	data->sts = data->pp->Init();
	if (data->sts<PXC_STATUS_NO_ERROR) {
		// Init failed: release the manager and leave the object
		// partially constructed (callers presumably check data->sts —
		// TODO confirm).
		wprintf_s(L"Failed to locate any video stream(s)\n");
		data->pp->Release();
		return ; //return sts;
	}
#ifndef SDK2016R3
	data->blobConfiguration->Update();
#endif
	/* Reset all properties */
	PXCCapture::Device *device = data->pp->QueryCaptureManager()->QueryDevice();
	device->ResetProperties(PXCCapture::STREAM_TYPE_ANY);
	PXCProjection *projection = device->CreateProjection();
	/* Get a calibration instance */
	PXCCalibration *calib = projection-> QueryInstance<PXCCalibration>();
	// NOTE(review): "clibration" is a typo for "calibration" (kept: this is
	// a documentation-only pass). The `ret` status values below are never
	// checked — calibration may be garbage if the queries fail.
	PXCCalibration::StreamCalibration clibration;
	PXCCalibration::StreamTransform transformation;
	// Color-stream intrinsics -> calib.intrinsics_rgb.
	pxcStatus ret= calib->QueryStreamProjectionParameters(PXCCapture::STREAM_TYPE_COLOR, &clibration, &transformation);
	PXCSizeI32 s = data->cm->QueryImageSize(PXCCapture::STREAM_TYPE_COLOR);
	ImageSourceEngine::calib.intrinsics_rgb.SetFrom(clibration.focalLength.x, clibration.focalLength.y, clibration.principalPoint.x, clibration.principalPoint.y,s.width,s.height);
	// Copy the color-stream extrinsics (3x3 rotation + translation) into a
	// 4x4 matrix for the rgb-to-depth transform.
	Matrix4f trans;
	trans.setIdentity();
	trans.m00 = transformation.rotation[0][0];
	trans.m01 = transformation.rotation[0][1];
	trans.m02 = transformation.rotation[0][2];
	trans.m10 = transformation.rotation[1][0];
	trans.m11 = transformation.rotation[1][1];
	trans.m12 = transformation.rotation[1][2];
	trans.m20 = transformation.rotation[2][0];
	trans.m21 = transformation.rotation[2][1];
	trans.m22 = transformation.rotation[2][2];
	trans.m03 = transformation.translation[0];
	trans.m13 = transformation.translation[1];
	trans.m23 = transformation.translation[2];
	ImageSourceEngine::calib.trafo_rgb_to_depth.SetFrom(trans);
	// Depth-stream intrinsics -> calib.intrinsics_d.
	s = data->cm->QueryImageSize(PXCCapture::STREAM_TYPE_DEPTH);
	ret = calib->QueryStreamProjectionParameters(PXCCapture::STREAM_TYPE_DEPTH, &clibration, &transformation);
	ImageSourceEngine::calib.intrinsics_d.SetFrom(clibration.focalLength.x, clibration.focalLength.y, clibration.principalPoint.x, clibration.principalPoint.y, s.width, s.height);
	/* Release the interface */
	projection->Release();
	/* Set mirror mode */
	//if (cmdl.m_bMirror) {
	//	device->SetMirrorMode(PXCCapture::Device::MirrorMode::MIRROR_MODE_HORIZONTAL);
	//}
	//else {
	//	device->SetMirrorMode(PXCCapture::Device::MirrorMode::MIRROR_MODE_DISABLED);
	//}
#else
	// librealsense2 path: BGR8 color + Z16 depth, both 640x480 @ 30 FPS.
	rs2::config cfg;
	cfg.enable_stream(RS2_STREAM_COLOR, 640, 480, RS2_FORMAT_BGR8, 30);
	cfg.enable_stream(RS2_STREAM_DEPTH, 640, 480, RS2_FORMAT_Z16, 30);
	data->profile = data->pipe.start(cfg);
	// The calibration read-out below is commented out on this path; the
	// values that follow are a one-off debugger dump kept for reference.
	//rs2::video_stream_profile color_stream= data->profile.get_stream(RS2_STREAM_COLOR).as<rs2::video_stream_profile>();
	//rs2::video_stream_profile depth_stream = data->profile.get_stream(RS2_STREAM_DEPTH).as<rs2::video_stream_profile>();
	//rs2_extrinsics extri= depth_stream.get_extrinsics_to(color_stream);
	//rs2_intrinsics intri = depth_stream.get_intrinsics();
	// depth
	// ppx 321.966583 float
	// ppy 246.107162 float
	// fx 475.193237 float
	// fy 475.193146 float
	//
	// color
	// ppx 304.000061 float
	// ppy 223.181778 float
	// fx 615.904358 float
	// fy 615.904419 float
	//- rotation 0x000000846513f198 {0.999991775, -0.00406141346, -0.000183502605, 0.00406062370, 0.999983311, -0.00411631726, ...} float[9]
	// [0] 0.999991775 float
	//[1] - 0.00406141346 float
	//[2] - 0.000183502605 float
	//[3] 0.00406062370 float
	//[4] 0.999983311 float
	//[5] - 0.00411631726 float
	//[6] 0.000200217590 float
	//[7] 0.00411553821 float
	//[8] 0.999991536 float
	//- translation 0x000000846513f1bc {0.0247000027, 8.39376517e-05, 0.00402066438} float[3]
	//[0] 0.0247000027 float
	//[1] 8.39376517e-05 float
	//[2] 0.00402066438 float
	//data->depth_scale = get_depth_scale(data->profile.get_device());
	//Pipeline could choose a device that does not have a color stream
	//If there is no color stream, choose to align depth to another stream
	data->align_to = find_stream_to_align(data->profile.get_streams());
	// Create a rs2::align object.
	// rs2::align allows us to perform alignment of depth frames to others frames
	//The "align_to" is the stream type to which we plan to align depth frames.
	// data->align= &rs2::align(data->align_to);
	// Define a variable for controlling the distance to clip
#endif
}