// Construct a capture object and immediately try to bring the camera up:
// create the CLEye camera instance, read back its frame dimensions and
// begin streaming. On creation failure _cam stays NULL and nothing starts.
EyeCameraCapture::EyeCameraCapture(GUID cameraGUID, CLEyeCameraColorMode mode, CLEyeCameraResolution resolution, float fps)
    : _cameraGUID(cameraGUID), _cam(NULL), _mode(mode), _resolution(resolution), _fps(fps)
{
    // Instantiate the CL Eye camera for the requested GUID/mode/resolution/fps.
    _cam = CLEyeCreateCamera(_cameraGUID, _mode, _resolution, _fps);
    if(_cam != NULL)
    {
        // Cache the frame size for the chosen resolution, then start streaming.
        CLEyeCameraGetFrameDimensions(_cam, _width, _height);
        CLEyeCameraStart(_cam);
    }
}
//! Begin frame capture on an already-running camera.
//! Returns false when the camera is not running, capture is already
//! active, or the driver refuses to start; true on success.
bool PS3::StartCapture()
{
    // Only start when the camera is up and no capture is in progress.
    const bool canStart = _bRunning && !_bCapture;
    if ( !canStart )
    {
        return false;
    }

    if ( CLEyeCameraStart( _cam ) )
    {
        _bCapture = true;
        return true;
    }

    return false;
}
//! Run void PS3::Run() { if ( !_bInitialized ) { return; } int width, height; //IplImage *pCapImage; _cam = CLEyeCreateCamera( _camGUID, _camColorMode, _camResolution, _frameRate ); //Sleep( 300 ); if ( _cam == NULL ) { printf( "\nPS3::Run()\tCould not create camera\n" ); return; //! Could not create camera } //printf( "\tPS3::Run()\t#1\n" ); CLEyeCameraGetFrameDimensions( _cam, width, height ); _pCapBuffer = new unsigned char[width * height * 4]; //printf( "PS3::Run() p=%p\n", _pCapBuffer ); //if ( _camColorMode == CLEYE_COLOR_PROCESSED || _camColorMode == CLEYE_COLOR_RAW ) { // pCapImage = cvCreateImage( cvSize(width, height), IPL_DEPTH_8U, 4 ); //} else { // pCapImage = cvCreateImage( cvSize(width, height), IPL_DEPTH_8U, 1 ); //} //! Set some camera parameters CLEyeSetCameraParameter( _cam, CLEYE_AUTO_GAIN, _bAutoGain ); CLEyeSetCameraParameter( _cam, CLEYE_GAIN, _gainValue ); CLEyeSetCameraParameter( _cam, CLEYE_AUTO_EXPOSURE, _bAutoExposure ); CLEyeSetCameraParameter( _cam, CLEYE_EXPOSURE, _exposureValue ); CLEyeSetCameraParameter( _cam, CLEYE_AUTO_WHITEBALANCE, _bAutoWhiteBalance ); CLEyeSetCameraParameter( _cam, CLEYE_WHITEBALANCE_RED, _whiteBalanceRed ); CLEyeSetCameraParameter( _cam, CLEYE_WHITEBALANCE_GREEN, _whiteBalanceGreen ); CLEyeSetCameraParameter( _cam, CLEYE_WHITEBALANCE_BLUE, _whiteBalanceBlue ); CLEyeSetCameraParameter( _cam, CLEYE_HFLIP, _bHFlip ); CLEyeSetCameraParameter( _cam, CLEYE_VFLIP, _bVFlip ); //! Start capturing if ( !CLEyeCameraStart( _cam ) ) { std::cout << "Could not start camera!\n" << std::endl; return; } //Sleep( 300 ); }
CameraControl * camera_control_new(int cameraID) { CameraControl* cc = (CameraControl*) calloc(1, sizeof(CameraControl)); cc->cameraID = cameraID; #if defined(CAMERA_CONTROL_USE_CL_DRIVER) int w, h; int cams = CLEyeGetCameraCount(); if (cams <= cameraID) { free(cc); return NULL; } GUID cguid = CLEyeGetCameraUUID(cameraID); cc->camera = CLEyeCreateCamera(cguid, CLEYE_COLOR_PROCESSED, CLEYE_VGA, 60); CLEyeCameraGetFrameDimensions(cc->camera, &w, &h); // Depending on color mode chosen, create the appropriate OpenCV image cc->frame4ch = cvCreateImage(cvSize(w, h), IPL_DEPTH_8U, 4); cc->frame3ch = cvCreateImage(cvSize(w, h), IPL_DEPTH_8U, 3); CLEyeCameraStart(cc->camera); #else char *video = psmove_util_get_env_string(PSMOVE_TRACKER_FILENAME_ENV); if (video) { psmove_DEBUG("Using '%s' as video input.\n", video); cc->capture = cvCaptureFromFile(video); free(video); } else { cc->capture = cvCaptureFromCAM(cc->cameraID); int width, height; get_metrics(&width, &height); cvSetCaptureProperty(cc->capture, CV_CAP_PROP_FRAME_WIDTH, width); cvSetCaptureProperty(cc->capture, CV_CAP_PROP_FRAME_HEIGHT, height); } #endif return cc; }
void ofxPS3::cameraInitializationLogic() { cameraType = PS3; int camNum = CLEyeGetCameraCount(); for (int i=0;i<camNum;i++) { if (CLEyeGetCameraUUID(i) == guid) { index = i; break; } } if (index == -1) return; ps3EyeCamera = CLEyeCreateCamera(guid, (depth == 1) ? (cameraPixelMode ? CLEYE_MONO_PROCESSED : CLEYE_MONO_RAW) : (cameraPixelMode ? CLEYE_COLOR_PROCESSED : CLEYE_COLOR_RAW), width * height > 320 * 240 ? CLEYE_VGA : CLEYE_QVGA, framerate); Sleep(500); for (int i=0;i<cameraBaseSettings->propertyType.size();i++) setCameraFeature(cameraBaseSettings->propertyType[i],cameraBaseSettings->propertyFirstValue[i],cameraBaseSettings->propertySecondValue[i],cameraBaseSettings->isPropertyAuto[i],cameraBaseSettings->isPropertyOn[i]); CLEyeCameraStart(ps3EyeCamera); }
bool open(int _index) { close(); int cams = CLEyeGetCameraCount(); std::cout << "CLEyeGetCameraCount() found " << cams << " devices." << std::endl; if (_index < cams) { std::cout << "Attempting to open camera " << _index << " of " << cams << "." << std::endl; GUID guid = CLEyeGetCameraUUID(_index); m_eye = CLEyeCreateCamera(guid, CLEYE_COLOR_PROCESSED, CLEYE_VGA, 75); CLEyeCameraGetFrameDimensions(m_eye, m_width, m_height); m_frame4ch = cvCreateImage(cvSize(m_width, m_height), IPL_DEPTH_8U, 4); m_frame = cvCreateImage(cvSize(m_width, m_height), IPL_DEPTH_8U, 3); CLEyeCameraStart(m_eye); CLEyeSetCameraParameter(m_eye, CLEYE_AUTO_EXPOSURE, false); CLEyeSetCameraParameter(m_eye, CLEYE_AUTO_GAIN, false); m_index = _index; } return isOpened(); }
void CLEyeCameraCapture::Run() { // Create camera instance _cam = CLEyeCreateCamera(_cameraGUID, _mode, _resolution, _fps); if(_cam == NULL) return; // Get camera frame dimensions CLEyeCameraGetFrameDimensions(_cam, w, h); // Depending on color mode chosen, create the appropriate OpenCV image if(_mode == CLEYE_COLOR_PROCESSED || _mode == CLEYE_COLOR_RAW) pCapImage = cvCreateImage(cvSize(w, h), IPL_DEPTH_8U, 4); else pCapImage = cvCreateImage(cvSize(w, h), IPL_DEPTH_8U, 1); // Set some camera parameters //CLEyeSetCameraParameter(_cam, CLEYE_GAIN, 20); //CLEyeSetCameraParameter(_cam, CLEYE_EXPOSURE, 511); CLEyeSetCameraParameter(_cam, CLEYE_AUTO_GAIN, true); CLEyeSetCameraParameter(_cam, CLEYE_AUTO_EXPOSURE, true); CLEyeSetCameraParameter( _cam, CLEYE_HFLIP, true); // Start capturing CLEyeCameraStart(_cam); cvGetImageRawData(pCapImage, &pCapBuffer); pCapture = pCapImage; long frames = 0; long count = GetTickCount(); long prevCount = 0; double fps = 0; // image capturing loop Mat src_gray, subImage, subImage_gray; vector<Vec3f> circles; Point center; Point n_center; int radius = 0; int counter = 0; char* fpsText = new char[5]; char* pos_text = new char[10]; while(_running) { CLEyeCameraGetFrame(_cam, pCapBuffer); //check fps every 100 frames frames++; if((frames % 100) == 0){ prevCount = count; count = GetTickCount(); fps = 100000.0/(count - prevCount); //std::cout << "fps: " << fps << endl; sprintf(fpsText, "fps: %f", fps); } if(frames > 100) putText(pCapture, fpsText, Point(5, 20), CV_FONT_HERSHEY_PLAIN, 1, Scalar(0, 255, 0)); else putText(pCapture, "calculating fps...", Point(5, 20), CV_FONT_HERSHEY_PLAIN, 1, Scalar(0, 255, 0)); //find circle in whole area of frame first if(!_isTracking){ CircleDetector(pCapture, src_gray, circles, center, radius); if(circles.size() != 0) _isTracking = true; n_center = center; } //dynamically move subimage area by tracking the object else { int subImage_size = 30; Point temp = FixSubImageSize(n_center, 320, 240, subImage_size); 
Rect t_rect(temp.x - subImage_size, temp.y - subImage_size, subImage_size*2, subImage_size*2); subImage = pCapture(t_rect); CircleDetector(subImage, subImage_gray, circles, center, radius); imshow(trackingWindowName, subImage); if(circles.size() == 0) { counter++; if(counter == 3) { _isTracking = false; counter = 0; cout << "Lost tracking! Search whole frame." << endl; } } else { counter = 0; n_center.x = temp.x - subImage_size + center.x; n_center.y = temp.y - subImage_size + center.y; cout << "fps: " << fps << " x:" << n_center.x << ", y:" << n_center.y << endl; } } sprintf(pos_text, "x=%d,y=%d", n_center.x, n_center.y); if(circles.size() != 0){ putText(pCapture, pos_text, Point(n_center.x + radius, n_center.y - radius), CV_FONT_HERSHEY_PLAIN, 1, Scalar(0, 255, 0)); } imshow(_windowName, pCapture); } // Stop camera capture CLEyeCameraStop(_cam); // Destroy camera object CLEyeDestroyCamera(_cam); // Destroy the allocated OpenCV image cvReleaseImage(&pCapImage); _cam = NULL; }
CameraControl * camera_control_new_with_settings(int cameraID, int width, int height, int framerate, int cam_type) { CameraControl* cc = (CameraControl*) calloc(1, sizeof(CameraControl)); cc->cameraID = cameraID; if (framerate <= 0) { framerate = PSMOVE_TRACKER_DEFAULT_FPS; } if (cam_type == PSMove_Camera_PS3EYE_BLUEDOT) { cc->focl_x = (float)PS3EYE_FOCAL_LENGTH_BLUE; cc->focl_y = (float)PS3EYE_FOCAL_LENGTH_BLUE; } else if (cam_type == PSMove_Camera_PS3EYE_REDDOT) { cc->focl_x = (float)PS3EYE_FOCAL_LENGTH_RED; cc->focl_y = (float)PS3EYE_FOCAL_LENGTH_RED; } else if (cam_type == PSMove_Camera_Unknown) { cc->focl_x = (float)PS3EYE_FOCAL_LENGTH_BLUE; cc->focl_y = (float)PS3EYE_FOCAL_LENGTH_BLUE; } // Needed for cbb tracker. Will be overwritten by camera calibration files if they exist. #if defined(CAMERA_CONTROL_USE_CL_DRIVER) // Windows 32-bit. Either CL_SDK or Registry_requiring int cams = CLEyeGetCameraCount(); if (cams <= cameraID) { free(cc); return NULL; } GUID cguid = CLEyeGetCameraUUID(cameraID); cc->camera = CLEyeCreateCamera(cguid, CLEYE_COLOR_PROCESSED, CLEYE_VGA, framerate); CLEyeCameraGetFrameDimensions(cc->camera, &width, &height); // Depending on color mode chosen, create the appropriate OpenCV image cc->frame4ch = cvCreateImage(cvSize(width, height), IPL_DEPTH_8U, 4); cc->frame3ch = cvCreateImage(cvSize(width, height), IPL_DEPTH_8U, 3); CLEyeCameraStart(cc->camera); #elif defined(CAMERA_CONTROL_USE_PS3EYE_DRIVER) // Mac or Windows // Initialize PS3EYEDriver ps3eye_init(); int cams = ps3eye_count_connected(); psmove_DEBUG("Found %i ps3eye(s) with CAMERA_CONTROL_USE_PS3EYE_DRIVER.\n", cams); if (cams <= cameraID) { free(cc); return NULL; } if (width <= 0 || height <= 0) { get_metrics(&width, &height); } psmove_DEBUG("Attempting to open ps3eye with cameraId, width, height, framerate: %d, %d, %d, %d.\n", cameraID, width, height, framerate); cc->eye = ps3eye_open(cameraID, width, height, framerate); if (cc->eye == NULL) { psmove_WARNING("Failed to open 
camera ID %d", cameraID); free(cc); return NULL; } cc->framebgr = cvCreateImage(cvSize(width, height), IPL_DEPTH_8U, 3); #else // Assume webcam accessible from OpenCV. char *video = psmove_util_get_env_string(PSMOVE_TRACKER_FILENAME_ENV); if (video) { psmove_DEBUG("Using '%s' as video input.\n", video); cc->capture = cvCaptureFromFile(video); free(video); } else { cc->capture = cvCaptureFromCAM(cc->cameraID); if (width <= 0 || height <= 0) { get_metrics(&width, &height); } cvSetCaptureProperty(cc->capture, CV_CAP_PROP_FRAME_WIDTH, width); cvSetCaptureProperty(cc->capture, CV_CAP_PROP_FRAME_HEIGHT, height); } #endif cc->width = width; cc->height = height; cc->deinterlace = PSMove_False; return cc; }
void Run() { int w, h; IplImage *pCapImage; PBYTE pCapBuffer = NULL; // Create camera instance _cam = CLEyeCreateCamera(_cameraGUID, _mode, _resolution, _fps); if(_cam == NULL) return; // Get camera frame dimensions CLEyeCameraGetFrameDimensions(_cam, w, h); // Depending on color mode chosen, create the appropriate OpenCV image if(_mode == CLEYE_COLOR_PROCESSED || _mode == CLEYE_COLOR_RAW) pCapImage = cvCreateImage(cvSize(w, h), IPL_DEPTH_8U, 4); else pCapImage = cvCreateImage(cvSize(w, h), IPL_DEPTH_8U, 1); // Set some camera parameters //CLEyeSetCameraParameter(_cam, CLEYE_GAIN, 30); //CLEyeSetCameraParameter(_cam, CLEYE_EXPOSURE, 500); //CLEyeSetCameraParameter(_cam, CLEYE_AUTO_EXPOSURE, false); //CLEyeSetCameraParameter(_cam, CLEYE_AUTO_GAIN, false); //CLEyeSetCameraParameter(_cam, CLEYE_AUTO_WHITEBALANCE, false); //CLEyeSetCameraParameter(_cam, CLEYE_WHITEBALANCE_RED, 100); //CLEyeSetCameraParameter(_cam, CLEYE_WHITEBALANCE_BLUE, 200); //CLEyeSetCameraParameter(_cam, CLEYE_WHITEBALANCE_GREEN, 200); // Start capturing CLEyeCameraStart(_cam); CvMemStorage* storage = cvCreateMemStorage(0); IplImage* hsv_frame = cvCreateImage(cvSize(pCapImage->width, pCapImage->height), IPL_DEPTH_8U, 3); IplImage* thresholded = cvCreateImage(cvSize(pCapImage->width, pCapImage->height), IPL_DEPTH_8U, 1); IplImage* temp = cvCreateImage(cvSize(pCapImage->width >> 1, pCapImage->height >> 1), IPL_DEPTH_8U, 3); // Create a window in which the captured images will be presented cvNamedWindow( "Camera" , CV_WINDOW_AUTOSIZE ); cvNamedWindow( "HSV", CV_WINDOW_AUTOSIZE ); cvNamedWindow( "EdgeDetection", CV_WINDOW_AUTOSIZE ); //int hl = 100, hu = 115, sl = 95, su = 135, vl = 115, vu = 200; int hl = 5, hu = 75, sl = 40, su = 245, vl = 105, vu = 175; // image capturing loop while(_running) { // Detect a red ball CvScalar hsv_min = cvScalar(hl, sl, vl, 0); CvScalar hsv_max = cvScalar(hu, su, vu, 0); cvGetImageRawData(pCapImage, &pCapBuffer); CLEyeCameraGetFrame(_cam, pCapBuffer); 
cvConvertImage(pCapImage, hsv_frame); // Get one frame if( !pCapImage ) { fprintf( stderr, "ERROR: frame is null...\n" ); getchar(); break; } // Covert color space to HSV as it is much easier to filter colors in the HSV color-space. cvCvtColor(pCapImage, hsv_frame, CV_RGB2HSV); // Filter out colors which are out of range. cvInRangeS(hsv_frame, hsv_min, hsv_max, thresholded); // Memory for hough circles CvMemStorage* storage = cvCreateMemStorage(0); // hough detector works better with some smoothing of the image cvSmooth( thresholded, thresholded, CV_GAUSSIAN, 9, 9 ); CvSeq* circles = cvHoughCircles(thresholded, storage, CV_HOUGH_GRADIENT, 2, thresholded->height/4, 100, 50, 10, 400); for (int i = 0; i < circles->total; i++) { float* p = (float*)cvGetSeqElem( circles, i ); //printf("Ball! x=%f y=%f r=%f\n\r",p[0],p[1],p[2] ); cvCircle( pCapImage, cvPoint(cvRound(p[0]),cvRound(p[1])), 3, CV_RGB(0,255,0), -1, 8, 0 ); cvCircle( pCapImage, cvPoint(cvRound(p[0]),cvRound(p[1])), cvRound(p[2]), CV_RGB(255,0,0), 3, 8, 0 ); } cvShowImage( "Camera", pCapImage ); // Original stream with detected ball overlay cvShowImage( "HSV", hsv_frame); // Original stream in the HSV color space cvShowImage( "EdgeDetection", thresholded ); // The stream after color filtering cvReleaseMemStorage(&storage); // Do not release the frame! 
//If ESC key pressed, Key=0x10001B under OpenCV 0.9.7(linux version), //remove higher bits using AND operator int key = cvWaitKey(10); switch(key){ case 'q' : hu += 5; break; case 'Q' : hu -= 5; break; case 'a' : hl -= 5; break; case 'A' : hl += 5; break; case 'w' : su += 5; break; case 'W' : su -= 5; break; case 's' : sl -= 5; break; case 'S' : sl += 5; break; case 'e' : vu += 5; break; case 'E' : vu -= 5; break; case 'd' : vl -= 5; break; case 'D' : vl += 5; break; } if (key != -1){ printf("H: %i, S: %i, V: %i\nH: %i, S: %i, V: %i\n\n", hu, su, vu, hl, sl, vl); } } cvReleaseImage(&temp); cvReleaseImage(&pCapImage); // Stop camera capture CLEyeCameraStop(_cam); // Destroy camera object CLEyeDestroyCamera(_cam); // Destroy the allocated OpenCV image cvReleaseImage(&pCapImage); _cam = NULL; }
//-------------------------------------------------------------- bool ofxCLEye::initGrabber(int w, int h, int deviceID, int frameRate, bool useTexture, bool useGrayscale, bool useThread){ setDeviceID(deviceID); setDesiredFrameRate(frameRate); setUseThread(useThread); setUseGrayscale(useGrayscale); setUseTexture(useTexture); if(w == 640 && h == 480){ resolution = CLEYE_VGA; } else if(w == 320 && h == 240){ resolution = CLEYE_QVGA; } else{ ofLogWarning(OFX_CLEYE_MODULE_NAME) << "initGrabber(): selected resolution " + ofToString(w) + "x" + ofToString(h) + " is not available with ofxCLEye"; ofLogWarning(OFX_CLEYE_MODULE_NAME) << "initGrabber(): using 640x480 instead"; resolution = CLEYE_VGA; } if(desiredFrameRate < 0){ ofLogWarning(OFX_CLEYE_MODULE_NAME) << "initGrabber(): selected framerate" + ofToString(desiredFrameRate) + "is not available with ofxCLeye"; ofLogWarning(OFX_CLEYE_MODULE_NAME) << "initGrabber(): using 60fps instead"; desiredFrameRate = 60; } GUID guid = getDeviceGUID(requestedDeviceID); cam = CLEyeCreateCamera(guid, colorMode, resolution, desiredFrameRate); if(cam == NULL){ ofLogError(OFX_CLEYE_MODULE_NAME) << "initGrabber(): error when creating instance of CLEyeCamera."; return false; } initialized = CLEyeCameraStart(cam); if(!initialized){ ofLogError(OFX_CLEYE_MODULE_NAME) << "initGrabber(): can't start the CLEye camera."; return false; } CLEyeCameraGetFrameDimensions(cam, width, height); // oF code style says to not use ternary operators, but sometimes they are really convenient. // Native color image from camera is RGBA (4 channels) viPixels = new unsigned char[width * height * ((colorMode == CLEYE_MONO_PROCESSED) ? 1 : 4)]; pixels.allocate(width, height, (colorMode == CLEYE_MONO_PROCESSED) ? 1 : 3); pixels.set(0); if(usingTexture){ int glFormat = (colorMode == CLEYE_MONO_PROCESSED) ? 
GL_LUMINANCE : GL_RGB; texture.allocate(width, height, glFormat); texture.loadData((unsigned char *)pixels.getPixels(), width, height, glFormat); } if(usingThread){ startThread(true); ofLogVerbose(OFX_CLEYE_MODULE_NAME) << "initGrabber(): thread started."; } return true; }