/** @brief Capture one stereo frame and convert it to a metric PCL point cloud.
  * @param[out] cloud Organized cloud (width x height), coordinates in meters, NaNs where matching failed.
  * @return true on success, false if the device is not open, is currently streaming, or NxLib throws. */
bool
pcl::EnsensoGrabber::grabSingleCloud (pcl::PointCloud<pcl::PointXYZ> &cloud)
{
  if (!device_open_)
    return (false);
  if (running_)  // single-shot grabbing is not allowed while the streaming thread runs
    return (false);
  try
  {
    NxLibCommand (cmdCapture).execute ();
    // Stereo matching task
    NxLibCommand (cmdComputeDisparityMap).execute ();
    // Convert disparity map into XYZ data for each pixel
    NxLibCommand (cmdComputePointMap).execute ();
    // Get info about the computed point map and copy it into a std::vector
    double timestamp;
    std::vector<float> pointMap;
    int width, height;
    // BUGFIX: was the mis-encoded token "×tamp" (HTML-entity-mangled "&timestamp");
    // pass the address so the raw image timestamp is actually written.
    camera_[itmImages][itmRaw][itmLeft].getBinaryDataInfo (0, 0, 0, 0, 0, &timestamp);  // Get raw image timestamp
    camera_[itmImages][itmPointMap].getBinaryDataInfo (&width, &height, 0, 0, 0, 0);
    camera_[itmImages][itmPointMap].getBinaryData (pointMap, 0);
    // Copy point cloud and convert millimeters to meters
    cloud.header.stamp = getPCLStamp (timestamp);
    cloud.resize (height * width);
    cloud.width = width;
    cloud.height = height;
    cloud.is_dense = false;  // the point map may contain NaNs for unmatched pixels
    // pointMap is a flat [x0,y0,z0, x1,y1,z1, ...] buffer; one cloud point per triple
    for (size_t i = 0; i < pointMap.size (); i += 3)
    {
      cloud.points[i / 3].x = pointMap[i] / 1000.0;
      cloud.points[i / 3].y = pointMap[i + 1] / 1000.0;
      cloud.points[i / 3].z = pointMap[i + 2] / 1000.0;
    }
    return (true);
  }
  catch (NxLibException &ex)
  {
    ensensoExceptionHandling (ex, "grabSingleCloud");
    return (false);
  }
}
/** @brief Capture a frame and add the detected calibration pattern to the NxLib buffer.
  * @return The number of patterns collected so far, or -1 if the device is unavailable or NxLib throws. */
int
pcl::EnsensoGrabber::captureCalibrationPattern () const
{
  // Need an open device that is not currently streaming.
  if (!device_open_ || running_)
    return (-1);
  try
  {
    NxLibCommand (cmdCapture).execute ();
    NxLibCommand (cmdCollectPattern).execute ();
  }
  catch (NxLibException &ex)
  {
    ensensoExceptionHandling (ex, "captureCalibrationPattern");
    return (-1);
  }
  // Report how many patterns the NxLib buffer now holds.
  return ((*root_)[itmParameters][itmPatternCount].asInt ());
}
/** @brief Discard all calibration patterns accumulated in the NxLib pattern buffer.
  * @return true on success, false if the device is unavailable or NxLib throws. */
bool
pcl::EnsensoGrabber::clearCalibrationPatternBuffer () const
{
  // Need an open device that is not currently streaming.
  if (!device_open_ || running_)
    return (false);
  try
  {
    NxLibCommand (cmdDiscardPatterns).execute ();
  }
  catch (NxLibException &ex)
  {
    ensensoExceptionHandling (ex, "clearCalibrationPatternBuffer");
    return (false);
  }
  return (true);
}
/** @brief Stop streaming (if active) and close the Ensenso device.
  * @return true on success, false if no device is open or NxLib throws. */
bool
pcl::EnsensoGrabber::closeDevice ()
{
  if (!device_open_)
    return (false);

  stop ();
  PCL_INFO ("Closing Ensenso stereo camera\n");
  try
  {
    NxLibCommand (cmdClose).execute ();
    device_open_ = false;
    return (true);
  }
  catch (NxLibException &ex)
  {
    ensensoExceptionHandling (ex, "closeDevice");
    return (false);
  }
}
void pcl::EnsensoGrabber::processGrabbing () { bool continue_grabbing = running_; while (continue_grabbing) { try { // Publish cloud / images if (num_slots<sig_cb_ensenso_point_cloud> () > 0 || num_slots<sig_cb_ensenso_images> () > 0 || num_slots<sig_cb_ensenso_point_cloud_images> () > 0) { pcl::PointCloud<pcl::PointXYZ>::Ptr cloud (new pcl::PointCloud<pcl::PointXYZ>); boost::shared_ptr<PairOfImages> images (new PairOfImages); fps_mutex_.lock (); frequency_.event (); fps_mutex_.unlock (); NxLibCommand (cmdCapture).execute (); double timestamp; camera_[itmImages][itmRaw][itmLeft].getBinaryDataInfo (0, 0, 0, 0, 0, ×tamp); // Gather images if (num_slots<sig_cb_ensenso_images> () > 0 || num_slots<sig_cb_ensenso_point_cloud_images> () > 0) { // Rectify images NxLibCommand (cmdRectifyImages).execute (); int width, height, channels, bpe; bool isFlt, collected_pattern = false; try // Try to collect calibration pattern, if not possible, publish RAW images instead { NxLibCommand collect_pattern (cmdCollectPattern); collect_pattern.parameters ()[itmBuffer].set (false); // Do NOT store the pattern into the buffer! collect_pattern.execute (); collected_pattern = true; } catch (const NxLibException &ex) { // We failed to collect the pattern but the RAW images are available! 
} if (collected_pattern) { camera_[itmImages][itmWithOverlay][itmLeft].getBinaryDataInfo (&width, &height, &channels, &bpe, &isFlt, 0); images->first.header.stamp = images->second.header.stamp = getPCLStamp (timestamp); images->first.width = images->second.width = width; images->first.height = images->second.height = height; images->first.data.resize (width * height * sizeof(float)); images->second.data.resize (width * height * sizeof(float)); images->first.encoding = images->second.encoding = getOpenCVType (channels, bpe, isFlt); camera_[itmImages][itmWithOverlay][itmLeft].getBinaryData (images->first.data.data (), images->first.data.size (), 0, 0); camera_[itmImages][itmWithOverlay][itmRight].getBinaryData (images->second.data.data (), images->second.data.size (), 0, 0); } else { camera_[itmImages][itmRaw][itmLeft].getBinaryDataInfo (&width, &height, &channels, &bpe, &isFlt, 0); images->first.header.stamp = images->second.header.stamp = getPCLStamp (timestamp); images->first.width = images->second.width = width; images->first.height = images->second.height = height; images->first.data.resize (width * height * sizeof(float)); images->second.data.resize (width * height * sizeof(float)); images->first.encoding = images->second.encoding = getOpenCVType (channels, bpe, isFlt); camera_[itmImages][itmRaw][itmLeft].getBinaryData (images->first.data.data (), images->first.data.size (), 0, 0); camera_[itmImages][itmRaw][itmRight].getBinaryData (images->second.data.data (), images->second.data.size (), 0, 0); } } // Gather point cloud if (num_slots<sig_cb_ensenso_point_cloud> () > 0 || num_slots<sig_cb_ensenso_point_cloud_images> () > 0) { // Stereo matching task NxLibCommand (cmdComputeDisparityMap).execute (); // Convert disparity map into XYZ data for each pixel NxLibCommand (cmdComputePointMap).execute (); // Get info about the computed point map and copy it into a std::vector std::vector<float> pointMap; int width, height; 
camera_[itmImages][itmPointMap].getBinaryDataInfo (&width, &height, 0, 0, 0, 0); camera_[itmImages][itmPointMap].getBinaryData (pointMap, 0); // Copy point cloud and convert in meters cloud->header.stamp = getPCLStamp (timestamp); cloud->points.resize (height * width); cloud->width = width; cloud->height = height; cloud->is_dense = false; // Copy data in point cloud (and convert milimeters in meters) for (size_t i = 0; i < pointMap.size (); i += 3) { cloud->points[i / 3].x = pointMap[i] / 1000.0; cloud->points[i / 3].y = pointMap[i + 1] / 1000.0; cloud->points[i / 3].z = pointMap[i + 2] / 1000.0; } } // Publish signals if (num_slots<sig_cb_ensenso_point_cloud_images> () > 0) point_cloud_images_signal_->operator () (cloud, images); else if (num_slots<sig_cb_ensenso_point_cloud> () > 0) point_cloud_signal_->operator () (cloud); else if (num_slots<sig_cb_ensenso_images> () > 0) images_signal_->operator () (images); } continue_grabbing = running_; } catch (NxLibException &ex) { ensensoExceptionHandling (ex, "processGrabbing"); } } }
/** @brief Interactive calibration-capture loop: repeatedly grabs a point cloud and a raw
  * left image from the Ensenso stereo camera, then a color snapshot from the attached
  * IDS uEye RGB camera, saving numbered PNGs into ./calib_en/ until the user enters 'q'.
  * @param[out] cloud Filled with the last captured point cloud (meters, organized). */
void
get_en_image (pcl::PointCloud<pcl::PointXYZ> &cloud)
{
  char flag = 'g';
  int i = 0;  // shot counter, appended to the saved file names
  while (flag != 'q')
  {
    ostringstream conv;
    conv << i;
    cout << "Capturing new calibration image from the ensenso stereo vision camera." << endl;
    /// Read the Ensenso stereo cameras:
    try
    {
      // Initialize NxLib and enumerate cameras
      nxLibInitialize (true);
      // Reference to the first camera in the node BySerialNo
      NxLibItem root;
      NxLibItem camera = root[itmCameras][itmBySerialNo][0];
      // Open the Ensenso
      NxLibCommand open (cmdOpen);
      open.parameters ()[itmCameras] = camera[itmSerialNumber].asString ();
      open.execute ();
      // Capture an image
      NxLibCommand (cmdCapture).execute ();
      // Stereo matching task
      NxLibCommand (cmdComputeDisparityMap).execute ();
      // Convert disparity map into XYZ data for each pixel
      NxLibCommand (cmdComputePointMap).execute ();
      // Get info about the computed point map and copy it into a std::vector
      double timestamp;
      std::vector<float> pointMap;
      int width, height;
      // BUGFIX: was the mis-encoded token "×tamp" (HTML-entity-mangled "&timestamp").
      camera[itmImages][itmRaw][itmLeft].getBinaryDataInfo (0, 0, 0, 0, 0, &timestamp);  // Get raw image timestamp
      camera[itmImages][itmPointMap].getBinaryDataInfo (&width, &height, 0, 0, 0, 0);
      camera[itmImages][itmPointMap].getBinaryData (pointMap, 0);
      // Copy point cloud and convert millimeters to meters
      //cloud.header.stamp = getPCLStamp (timestamp);
      cloud.resize (height * width);
      cloud.width = width;
      cloud.height = height;
      cloud.is_dense = false;
      // BUGFIX: loop index renamed (was "int i", shadowing the outer shot counter)
      // and retyped to size_t to avoid the signed/unsigned comparison.
      for (size_t p = 0; p < pointMap.size (); p += 3)
      {
        cloud.points[p / 3].x = pointMap[p] / 1000.0;
        cloud.points[p / 3].y = pointMap[p + 1] / 1000.0;
        cloud.points[p / 3].z = pointMap[p + 2] / 1000.0;
      }
      NxLibCommand (cmdRectifyImages).execute ();
      // Save images
      NxLibCommand saveImage (cmdSaveImage);
      // raw left
      saveImage.parameters ()[itmNode] = camera[itmImages][itmRaw][itmLeft].path;
      saveImage.parameters ()[itmFilename] = "calib_en/raw_left" + conv.str () + ".png";
      saveImage.execute ();
      // raw right
      /*saveImage.parameters()[itmNode] = camera[itmImages][itmRaw][itmRight].path;
      saveImage.parameters()[itmFilename] = "calib_en/raw_right.png";
      saveImage.execute();
      // rectified left
      saveImage.parameters()[itmNode] = camera[itmImages][itmRectified][itmLeft].path;
      saveImage.parameters()[itmFilename] = "calib_en/rectified_left.png";
      saveImage.execute();
      // rectified right
      saveImage.parameters()[itmNode] = camera[itmImages][itmRectified][itmRight].path;
      saveImage.parameters()[itmFilename] = "calib_en/rectified_right.png";
      saveImage.execute();*/
    }
    catch (NxLibException& e)
    {
      // Display NxLib API exceptions, if any
      printf ("An NxLib API error with code %d (%s) occurred while accessing item %s.\n",
              e.getErrorCode (), e.getErrorText ().c_str (), e.getItemPath ().c_str ());
      if (e.getErrorCode () == NxLibExecutionFailed)
        printf ("/Execute:\n%s\n", NxLibItem (itmExecute).asJson (true).c_str ());
    }
    /*catch (NxLibException &ex)
    {
      ensensoExceptionHandling (ex, "grabSingleCloud");
    }*/
    catch (...)
    {
      // Display other exceptions
      printf ("Something, somewhere went terribly wrong!\n");
    }
    /*cout<<"Plug in the RGB camera and press any key to continue."<<endl;
    cin.ignore(); cin.get();*/
    cout << "Capturing new calibration image from the ensenso RGB camera." << endl;
    /// Read the IDS RGB Camera attached to the Ensenso stereo camera
    HIDS hCam = 0;
    printf ("Success-Code: %d\n", IS_SUCCESS);
    // Open the camera
    INT nRet = is_InitCamera (&hCam, NULL);
    printf ("Status Init %d\n", nRet);
    // Set the pixel clock
    UINT nPixelClockDefault = 9;
    nRet = is_PixelClock (hCam, IS_PIXELCLOCK_CMD_SET,
                          (void*) &nPixelClockDefault, sizeof (nPixelClockDefault));
    printf ("Status is_PixelClock %d\n", nRet);
    // Set the camera color mode
    //INT colorMode = IS_CM_CBYCRY_PACKED;
    INT colorMode = IS_CM_BGR8_PACKED;
    nRet = is_SetColorMode (hCam, colorMode);
    printf ("Status SetColorMode %d\n", nRet);
    UINT formatID = 4;
    // Set the image size -> 2592x1944
    nRet = is_ImageFormat (hCam, IMGFRMT_CMD_SET_FORMAT, &formatID, 4);
    printf ("Status ImageFormat %d\n", nRet);
    // Allocate memory for the image
    char* pMem = NULL;
    int memID = 0;
    nRet = is_AllocImageMem (hCam, 1280, 1024, 24, &pMem, &memID);
    printf ("Status AllocImage %d\n", nRet);
    // Make that memory active
    nRet = is_SetImageMem (hCam, pMem, memID);
    printf ("Status SetImageMem %d\n", nRet);
    // Keep images in camera memory (DIB mode)
    INT displayMode = IS_SET_DM_DIB;
    nRet = is_SetDisplayMode (hCam, displayMode);
    printf ("Status displayMode %d\n", nRet);
    // Capture a frame
    nRet = is_FreezeVideo (hCam, IS_WAIT);
    printf ("Status is_FreezeVideo %d\n", nRet);
    // Read the image from memory and save it as a file
    String path = "./calib_en/snap_BGR" + conv.str () + ".png";
    std::wstring widepath;
    // BUGFIX: loop index renamed (was "int i", shadowing the outer shot counter)
    // and retyped to size_t to match path.length().
    for (size_t c = 0; c < path.length (); ++c)
      widepath += wchar_t (path[c]);
    IMAGE_FILE_PARAMS ImageFileParams;
    ImageFileParams.pwchFileName = &widepath[0];
    ImageFileParams.pnImageID = NULL;
    ImageFileParams.ppcImageMem = NULL;
    ImageFileParams.nQuality = 0;
    ImageFileParams.nFileType = IS_IMG_PNG;
    nRet = is_ImageFile (hCam, IS_IMAGE_FILE_CMD_SAVE,
                         (void*) &ImageFileParams, sizeof (ImageFileParams));
    printf ("Status is_ImageFile %d\n", nRet);
    // Release the camera again
    is_ExitCamera (hCam);
    cout << "To quit capturing calibration images, choose q. Else, choose any other letter." << endl;
    cin >> flag;
    i++;
  }
}