// Destructor: stops the capture spin thread, detaches uEye frame events,
// frees image buffers, and closes both cameras.
UEyeCaptureInterface::~UEyeCaptureInterface()
{
    cout << "Request for killing the thread" << endl;
    shouldStopSpinThread = true;
    // The spin thread holds 'spinRunning' while active; acquiring it (with a
    // 1 s timeout) means the thread observed the stop flag and exited.
    bool result = spinRunning.tryLock(1000);

    // Stop frame-event notification on both cameras before freeing resources.
    is_DisableEvent(leftCamera .mCamera, IS_SET_EVENT_FRAME);
    is_DisableEvent(rightCamera.mCamera, IS_SET_EVENT_FRAME);

#ifdef Q_OS_WIN
    // On Windows the SDK frame events are backed by Win32 event handles that
    // must be detached from the driver and closed explicitly.
    is_ExitEvent (leftCamera .mCamera, IS_SET_EVENT_FRAME);
    is_ExitEvent (rightCamera.mCamera, IS_SET_EVENT_FRAME);
    CloseHandle(leftCamera .mWinEvent);
    CloseHandle(rightCamera.mWinEvent);
#endif

    if (result)
        printf("Camera thread killed\n");
    else
        printf("Unable to exit Camera thread\n"); // lock timed out; teardown proceeds anyway

    printf("Deleting image buffers\n");
    leftCamera.deAllocImages();
    rightCamera.deAllocImages();

    printf("uEye: Closing camera...");
    is_ExitCamera(leftCamera.mCamera);
    is_ExitCamera(rightCamera.mCamera);
    printf("done\n");
}
// Shuts down live capture for the given camera handle: stops the rendering
// thread, ends live video, closes any open AVI instance, frees the image
// buffer, and finally closes the camera. Returns an IS_* status code.
INT WINAPI Dispose(HIDS* m_hCam)
{
    INT result = IS_SUCCESS;
    AFX_MANAGE_STATE(AfxGetStaticModuleState()); // required MFC module-state switch for a DLL entry point

    if( m_bRunning && !m_bRecording)
    {
        // stop rendering thread
        PostThreadMessage(m_renderThread->m_nThreadID, IS_THREAD_MESSAGE, IS_RUNNING, FALSE);
        // wait for thread to terminate
        WaitForSingleObject(m_renderThread->m_hThread, INFINITE);
        // stop video event notification
        result = is_StopLiveVideo( *m_hCam, IS_WAIT );
        // run rendering thread destructor
        delete m_renderThread;
        m_renderThread = NULL;
        // close AVI handle and reset avi instance ID
        if (m_nAviID)
        {
            isavi_ExitAVI(m_nAviID);
            m_nAviID = 0;
        }
        // Free the allocated buffer
        if( m_pcImageMemory != NULL )
        {
            is_FreeImageMem( *m_hCam, m_pcImageMemory, m_lMemoryId );
        }
        m_pcImageMemory = NULL;
        // Close camera; OR-ed so an earlier is_StopLiveVideo failure is not lost
        result |= is_ExitCamera(*m_hCam );
        // NOTE(review): this nulls only the local copy of the parameter — the
        // caller's handle pointer is unchanged; confirm callers reset their own.
        m_hCam = NULL;
        m_bRunning = FALSE;
    }
    else if (m_hCam)
    {
        // Not running (or still recording): just close the camera handle.
        result = is_ExitCamera(*m_hCam );
        m_hCam = NULL;
    }
    return result;
}
/* Deallocator for the Python Camera object.
 *
 * Tears down only what was actually initialized, tracked by self->ready.
 * The switch intentionally falls through: a fully READY camera must first
 * leave the image queue and free its buffers, then (like a merely
 * CONNECTED one) close the camera handle. */
static void ids_core_Camera_dealloc(ids_core_Camera *self) {
    /* Use ready flag to determine state of readiness to deallocate */
    switch (self->ready) {
    case READY:
        is_ExitImageQueue(self->handle);
        ids_core_Camera_free_all(self, NULL, NULL);
        /* intentional fall-through */
    case CONNECTED:
        /* Attempt to close camera */
        is_ExitCamera(self->handle);
    }

    /* Release the Python object itself via its type's free slot. */
    Py_TYPE(self)->tp_free((PyObject*)self);
}
int CameraApi::closeCamera() { for(int i=0; i<ringbufferSize; i++) { is_FreeImageMem(mhCam, ringbuffer[i], ringbufferId[i]); } delete ringbuffer; delete ringbufferId; int isRet = is_ExitCamera(mhCam); mhCam = 0; return isRet; }
////////////////////////////////////////////////////////////////////////////////// // CloseCamera ------------------------------------------------------------------- ////////////////////////////////////////////////////////////////////////////////// BOOL ofxUeye::CloseCamera(){ BOOL boRet = FALSE; if( m_hCam != 0 ) { is_EnableMessage( m_hCam, IS_FRAME, NULL ); is_StopLiveVideo( m_hCam, IS_WAIT ); if( m_pcImageMemory != NULL ) is_FreeImageMem( m_hCam, m_pcImageMemory, m_nMemoryId ); m_pcImageMemory = NULL; is_ExitCamera( m_hCam ); m_hCam = NULL; boRet = TRUE; } return boRet; }
bool FlosIDSAdaptor::closeDevice() { if(!isOpen()) { return true; } if(m_acquireThread) { PostThreadMessage(m_acquireThreadID, WM_QUIT, 0, 0); WaitForSingleObject(m_acquireThread, 10000); CloseHandle(m_acquireThread); m_acquireThread = NULL; } //is_ExitCamera frees allocated memory is_ExitCamera(m_deviceID); return true; }
// MEX entry point: frees a uEye image buffer and closes the camera.
// Expected call from MATLAB: fn(hCam, imageStruct) with no outputs, where
// imageStruct carries 'pointer' (buffer address) and 'id' (memory id) fields.
void mexFunction( int nlhs, mxArray *plhs[],int nrhs, const mxArray*prhs[] )
{
    /* Check for proper number of arguments */
    if (!(nrhs == 2) || !(nlhs==0)) {
        mexErrMsgTxt("You have to input camera handle");
    }
    if (!mxIsStruct(prhs[1])) {
        mexErrMsgTxt("That second input has to be an image structure, you know.");
    }

    int error = 0;
    // Camera handle is passed as raw bytes inside the first argument's data.
    HCAM hCam = *(HCAM *)mxGetPr(prhs[0]);

    int pointer_field = mxGetFieldNumber(prhs[1],"pointer");
    int ID_field = mxGetFieldNumber(prhs[1],"id");

    mxArray *ppointer_field = mxGetFieldByNumber(prhs[1],0,pointer_field);
    // NOTE(review): the buffer address is round-tripped through a 32-bit int,
    // which truncates pointers on 64-bit MATLAB — confirm how the struct's
    // 'pointer' field is produced before trusting this on a 64-bit build.
    char *ppImgMem = (char *)*(int *)mxGetPr(ppointer_field);

    mxArray *pID_field = mxGetFieldByNumber(prhs[1],0,ID_field);
    int id = *(int *)mxGetPr(pID_field);

    if (hCam!= NULL) {
        // Free the driver-side image buffer; abort with a MATLAB error on failure.
        error = is_FreeImageMem(hCam, ppImgMem, id);
        if (error != IS_SUCCESS) {
            mexErrMsgTxt("Error freeing image memory");
        }
        //Close and exit camera
        error = is_ExitCamera(hCam );
        if (error != IS_SUCCESS) {
            mexErrMsgTxt("Error exiting camera");
        }
        // These null out local copies only; the MATLAB-side values are untouched.
        hCam = NULL;
        ppImgMem = NULL;
        // mexPrintf("Memory freed. \n");
    }
    return;
}
// Disconnects from the camera: puts it in standby, releases the frame buffer,
// closes the handle, and logs the disconnect. Returns the last IS_* status.
INT UEyeCamDriver::disconnectCam() {
  INT is_err = IS_SUCCESS;

  if (!isConnected()) {
    return is_err;  // no camera attached — nothing to release
  }

  setStandbyMode();

  // Hand the existing frame buffer back to the driver before dropping the handle.
  if (cam_buffer_ != NULL) {
    is_err = is_FreeImageMem(cam_handle_, cam_buffer_, cam_buffer_id_);
  }
  cam_buffer_ = NULL;

  // Close the camera and invalidate the cached handle.
  is_err = is_ExitCamera(cam_handle_);
  cam_handle_ = (HIDS) 0;

  std::cout << "Disconnected UEye camera '" + cam_name_ + "'" << std::endl;

  return is_err;
}
bool IdsSourceSink::ReleaseCamera() { is_ExitCamera(hCam); hCam=(HIDS)0; return true; }
//Open our camera bool IDSCamera::OpenCamera() { if (m_hCam!=0) { //free old image mem. is_FreeImageMem(m_hCam,m_pcImageMemory,m_lMemoryId); is_ExitCamera(m_hCam); } // init camera m_hCam = (HIDS) 0; // open next camera m_Ret = is_InitCamera(&m_hCam,NULL); // init camera if( m_Ret == IS_SUCCESS ){ // retrieve original image size SENSORINFO sInfo; is_GetSensorInfo(m_hCam,&sInfo); m_nSizeX = sInfo.nMaxWidth; m_nSizeY = sInfo.nMaxHeight; // setup the color depth to the current windows setting //is_GetColorDepth(m_hCam,&m_nBitsPerPixel,&m_nColorMode); is_SetColorMode(m_hCam, IS_SET_CM_Y8); //printf("m_nBitsPerPixel=%i m_nColorMode=%i\n",m_nBitsPerPixel,IS_SET_CM_Y8); // memory initialization is_AllocImageMem(m_hCam, m_nSizeX, m_nSizeY, m_nBitsPerPixel, &m_pcImageMemory, &m_lMemoryId); //set memory active is_SetImageMem( m_hCam, m_pcImageMemory,m_lMemoryId ); // display initialization is_SetImageSize( m_hCam, m_nSizeX, m_nSizeY ); is_SetDisplayMode( m_hCam, IS_SET_DM_DIB); // Reinit with slower frame rate for testing on vmWare with USB 1.1 if( is_LoadParameters( m_hCam, config_file ) == IS_SUCCESS ) { // realloc image mem with actual sizes and depth. 
is_FreeImageMem( m_hCam, m_pcImageMemory, m_lMemoryId ); m_nSizeX = is_SetImageSize( m_hCam, IS_GET_IMAGE_SIZE_X, 0 ); m_nSizeY = is_SetImageSize( m_hCam, IS_GET_IMAGE_SIZE_Y, 0 ); switch( is_SetColorMode( m_hCam, IS_GET_COLOR_MODE ) ) { case IS_SET_CM_RGB32: m_nBitsPerPixel = 32; break; case IS_SET_CM_RGB24: m_nBitsPerPixel = 24; break; case IS_SET_CM_RGB16: case IS_SET_CM_UYVY: m_nBitsPerPixel = 16; break; case IS_SET_CM_RGB15: m_nBitsPerPixel = 15; break; case IS_SET_CM_Y8: case IS_SET_CM_RGB8: case IS_SET_CM_BAYER: default: m_nBitsPerPixel = 8; break; } // memory initialization is_AllocImageMem( m_hCam, m_nSizeX, m_nSizeY, m_nBitsPerPixel, &m_pcImageMemory, &m_lMemoryId); is_SetImageMem(m_hCam, m_pcImageMemory, m_lMemoryId ); // set memory active // display initialization is_SetImageSize(m_hCam, m_nSizeX, m_nSizeY ); } } return true; }
// Interactively captures calibration images: each iteration grabs a point
// cloud + raw image from the Ensenso stereo camera and a PNG snapshot from
// the attached IDS RGB camera, until the user enters 'q'. The last computed
// Ensenso point map is returned through 'cloud' (coordinates in meters).
void get_en_image(pcl::PointCloud<pcl::PointXYZ> &cloud)
{
  char flag = 'g';
  int shot = 0;  // suffix appended to the saved calibration image filenames

  while (flag != 'q')
  {
    ostringstream conv;
    conv << shot;

    cout << "Capturing new calibration image from the ensenso stereo vision camera." << endl;

    /// Read the Ensenso stereo cameras:
    try
    {
      // Initialize NxLib and enumerate cameras
      nxLibInitialize(true);

      // Reference to the first camera in the node BySerialNo
      NxLibItem root;
      NxLibItem camera = root[itmCameras][itmBySerialNo][0];

      // Open the Ensenso
      NxLibCommand open(cmdOpen);
      open.parameters()[itmCameras] = camera[itmSerialNumber].asString();
      open.execute();

      // Capture an image
      NxLibCommand (cmdCapture).execute();

      // Stereo matching task
      NxLibCommand (cmdComputeDisparityMap).execute ();

      // Convert disparity map into XYZ data for each pixel
      NxLibCommand (cmdComputePointMap).execute ();

      // Get info about the computed point map and copy it into a std::vector
      double timestamp;
      std::vector<float> pointMap;
      int width, height;

      // BUG FIX: '&timestamp' had been mangled into '×tamp' (an HTML
      // '&times;' encoding artifact), which does not compile.
      camera[itmImages][itmRaw][itmLeft].getBinaryDataInfo (0, 0, 0, 0, 0, &timestamp); // Get raw image timestamp
      camera[itmImages][itmPointMap].getBinaryDataInfo (&width, &height, 0, 0, 0, 0);
      camera[itmImages][itmPointMap].getBinaryData (pointMap, 0);

      // Copy point cloud and convert in meters
      //cloud.header.stamp = getPCLStamp (timestamp);
      cloud.resize (height * width);
      cloud.width = width;
      cloud.height = height;
      cloud.is_dense = false;

      // Copy data in point cloud (and convert millimeters in meters).
      // Loop counter renamed so it no longer shadows the outer shot counter.
      for (size_t idx = 0; idx < pointMap.size (); idx += 3)
      {
        cloud.points[idx / 3].x = pointMap[idx] / 1000.0;
        cloud.points[idx / 3].y = pointMap[idx + 1] / 1000.0;
        cloud.points[idx / 3].z = pointMap[idx + 2] / 1000.0;
      }

      NxLibCommand (cmdRectifyImages).execute();

      // Save images
      NxLibCommand saveImage(cmdSaveImage);
      // raw left
      saveImage.parameters()[itmNode] = camera[itmImages][itmRaw][itmLeft].path;
      saveImage.parameters()[itmFilename] = "calib_en/raw_left" + conv.str()+".png";
      saveImage.execute();
      // raw right
      /*saveImage.parameters()[itmNode] = camera[itmImages][itmRaw][itmRight].path;
      saveImage.parameters()[itmFilename] = "calib_en/raw_right.png";
      saveImage.execute();
      // rectified left
      saveImage.parameters()[itmNode] = camera[itmImages][itmRectified][itmLeft].path;
      saveImage.parameters()[itmFilename] = "calib_en/rectified_left.png";
      saveImage.execute();
      // rectified right
      saveImage.parameters()[itmNode] = camera[itmImages][itmRectified][itmRight].path;
      saveImage.parameters()[itmFilename] = "calib_en/rectified_right.png";
      saveImage.execute();*/
    }
    catch (NxLibException& e)
    {
      // Display NxLib API exceptions, if any
      printf("An NxLib API error with code %d (%s) occurred while accessing item %s.\n",
             e.getErrorCode(), e.getErrorText().c_str(), e.getItemPath().c_str());
      if (e.getErrorCode() == NxLibExecutionFailed)
        printf("/Execute:\n%s\n", NxLibItem(itmExecute).asJson(true).c_str());
    }
    /*catch (NxLibException &ex) { ensensoExceptionHandling (ex, "grabSingleCloud"); }*/
    catch (...)
    {
      // Display other exceptions
      printf("Something, somewhere went terribly wrong!\n");
    }

    /*cout<<"Plug in the RGB camera and press any key to continue."<<endl;
    cin.ignore(); cin.get();*/

    cout << "Capturing new calibration image from the ensenso RGB camera." << endl;

    /// Read the IDS RGB Camera attached to the Ensenso stereo camera
    HIDS hCam = 0;
    printf("Success-Code: %d\n",IS_SUCCESS);

    // Open the camera
    INT nRet = is_InitCamera (&hCam, NULL);
    printf("Status Init %d\n",nRet);

    // Set the pixel clock
    UINT nPixelClockDefault = 9;
    nRet = is_PixelClock(hCam, IS_PIXELCLOCK_CMD_SET,
                         (void*)&nPixelClockDefault, sizeof(nPixelClockDefault));
    printf("Status is_PixelClock %d\n",nRet);

    // Set the camera's color mode
    //INT colorMode = IS_CM_CBYCRY_PACKED;
    INT colorMode = IS_CM_BGR8_PACKED;
    nRet = is_SetColorMode(hCam,colorMode);
    printf("Status SetColorMode %d\n",nRet);

    // Select image format 4 (2592x1944 on this sensor)
    UINT formatID = 4;
    nRet = is_ImageFormat(hCam, IMGFRMT_CMD_SET_FORMAT, &formatID, 4);
    printf("Status ImageFormat %d\n",nRet);

    // Allocate image memory
    char* pMem = NULL;
    int memID = 0;
    nRet = is_AllocImageMem(hCam, 1280, 1024, 24, &pMem, &memID);
    printf("Status AllocImage %d\n",nRet);

    // Make that memory the active capture buffer
    nRet = is_SetImageMem(hCam, pMem, memID);
    printf("Status SetImageMem %d\n",nRet);

    // Keep images in camera memory (DIB mode, no direct display)
    INT displayMode = IS_SET_DM_DIB;
    nRet = is_SetDisplayMode (hCam, displayMode);
    printf("Status displayMode %d\n",nRet);

    // Take a snapshot
    nRet = is_FreezeVideo(hCam, IS_WAIT);
    printf("Status is_FreezeVideo %d\n",nRet);

    // Read the image from memory and save it to a file; the SDK expects a
    // wide-character filename, so widen the narrow path character by character.
    String path = "./calib_en/snap_BGR"+conv.str()+".png";
    std::wstring widepath;
    for (size_t c = 0; c < path.length(); ++c)
      widepath += wchar_t (path[c]);

    IMAGE_FILE_PARAMS ImageFileParams;
    ImageFileParams.pwchFileName = &widepath[0];
    ImageFileParams.pnImageID = NULL;
    ImageFileParams.ppcImageMem = NULL;
    ImageFileParams.nQuality = 0;
    ImageFileParams.nFileType = IS_IMG_PNG;

    nRet = is_ImageFile(hCam, IS_IMAGE_FILE_CMD_SAVE,
                        (void*) &ImageFileParams, sizeof(ImageFileParams));
    printf("Status is_ImageFile %d\n",nRet);

    // Release the camera again
    is_ExitCamera(hCam);

    cout << "To quit capturing calibration images, choose q. Else, choose any other letter." << endl;
    cin >> flag;
    shot++;
  }
}