bool PS3EyeTracker::open(const DeviceEnumerator *enumerator) { const TrackerDeviceEnumerator *tracker_enumerator = static_cast<const TrackerDeviceEnumerator *>(enumerator); const char *cur_dev_path = tracker_enumerator->get_path(); bool bSuccess = false; if (getIsOpen()) { SERVER_LOG_WARNING("PS3EyeTracker::open") << "PS3EyeTracker(" << cur_dev_path << ") already open. Ignoring request."; bSuccess = true; } else { const int camera_index = tracker_enumerator->get_camera_index(); SERVER_LOG_INFO("PS3EyeTracker::open") << "Opening PS3EyeTracker(" << cur_dev_path << ", camera_index=" << camera_index << ")"; VideoCapture = new PSEyeVideoCapture(camera_index); if (VideoCapture->isOpened()) { CaptureData = new PSEyeCaptureData; USBDevicePath = enumerator->get_path(); bSuccess = true; } else { SERVER_LOG_ERROR("PS3EyeTracker::open") << "Failed to open PS3EyeTracker(" << cur_dev_path << ", camera_index=" << camera_index << ")"; close(); } } if (bSuccess) { std::string identifier = VideoCapture->getUniqueIndentifier(); std::string config_name = "PS3EyeTrackerConfig_"; config_name.append(identifier); cfg = PS3EyeTrackerConfig(config_name); cfg.load(); setExposure(cfg.exposure); setGain(cfg.gain); } return bSuccess; }
//============================================================================= // Connection //============================================================================= bool TOFApp::connect(DepthCamera::FrameType frmType) { const vector<DevicePtr> &devices = _sys.scan(); if (devices.size() > 0) { _depthCamera = _sys.connect(devices[0]); if (!_depthCamera) return false; if (!_depthCamera->isInitialized()) return false; } else return false; _frameType = frmType; _depthCamera->registerCallback(_frameType, frameCallback); _depthCamera->setFrameSize(_dimen); if (setProfile(_profile)) cout << "Profile " << _profile << " found." << endl; else cout << "Profile " << _profile << "not found." << endl; setIllumPower(DEFAULT_ILLUM_POWER); setExposure(DEFAULT_EXPOSURE); #if 0 FilterPtr p = _sys.createFilter("Voxel::MedianFilter", DepthCamera::FRAME_RAW_FRAME_PROCESSED); if (!p) { logger(LOG_ERROR) << "Failed to get MedianFilter" << std::endl; return false; } //p->set("deadband", 0.1f); _depthCamera->addFilter(p, DepthCamera::FRAME_RAW_FRAME_PROCESSED, 0); p = _sys.createFilter("Voxel::TemporalMedianFilter", DepthCamera::FRAME_RAW_FRAME_PROCESSED); if (!p) { logger(LOG_ERROR) << "Failed to get TemporalMedianFilter" << std::endl; return false; } //p->set("deadband", 0.01f); _depthCamera->addFilter(p, DepthCamera::FRAME_RAW_FRAME_PROCESSED, 0); #endif _depthCamera->start(); _isConnected = true; return true; }
// GUI event dispatcher: maps the triggered widget's name onto the matching
// camera action — load, host text entry, an exposure mode, an auto white
// balance mode, or an image effect. Unrecognized names are ignored.
void UIRpiCam::guiEvent(ofxUIEventArgs &e){
    string name = e.widget->getName();

    if( name == "LOAD" ){
        if(bLoading){ load(); }
    }
    else if (name == "HOST"){
        host = textField->getTextString();
    }
    // --- exposure modes ---
    else if(name == "EXPO_OFF"){ setExposure(EXPO_OFF); }
    else if(name == "EXPO_AUTO"){ setExposure(EXPO_AUTO); }
    else if(name == "EXPO_NIGH"){ setExposure(EXPO_NIGH); }
    else if(name == "EXPO_NIGHT_PREVIEW"){ setExposure(EXPO_NIGHT_PREVIEW); }
    else if(name == "EXPO_BLACK_LIGHT"){ setExposure(EXPO_BLACK_LIGHT); }
    else if(name == "EXPO_SPOT_LIGHT"){ setExposure(EXPO_SPOT_LIGHT); }
    else if(name == "EXPO_SPORT"){ setExposure(EXPO_SPORT); }
    else if(name == "EXPO_SNOW"){ setExposure(EXPO_SNOW); }
    else if(name == "EXPO_BEACH"){ setExposure(EXPO_BEACH); }
    else if(name == "EXPO_VERY_LONG"){ setExposure(EXPO_VERY_LONG); }
    else if(name == "EXPO_FIXED_FPS"){ setExposure(EXPO_FIXED_FPS); }
    else if(name == "EXPO_ANTI_SHAKE"){ setExposure(EXPO_ANTI_SHAKE); }
    else if(name == "EXPO_FIREWORKS"){ setExposure(EXPO_FIREWORKS); }
    // --- auto white balance modes ---
    else if(name == "AWB_OFF"){ setAWD(AWB_OFF); }
    else if(name == "AWB_AUTO"){ setAWD(AWB_AUTO); }
    // BUG FIX: removed a second "AWB_OFF" branch here — it duplicated the one
    // above and was unreachable dead code.
    else if(name == "AWB_SUN"){ setAWD(AWB_SUN); }
    else if(name == "AWB_CLOUD"){ setAWD(AWB_CLOUD); }
    else if(name == "AWB_SHADE"){ setAWD(AWB_SHADE); }
    else if(name == "AWB_TUNGSTEN"){ setAWD(AWB_TUNGSTEN); }
    else if(name == "AWB_FLUORESCENT"){ setAWD(AWB_FLUORESCENT); }
    else if(name == "AWB_INCADESCENT"){ setAWD(AWB_INCADESCENT); }
    else if(name == "AWB_FLASH"){ setAWD(AWB_FLASH); }
    else if(name == "AWB_HORIZON"){ setAWD(AWB_HORIZON); }
    // --- image effects ---
    else if(name == "FX_NONE"){ setEffect(FX_NONE); }
    else if(name == "FX_NEGATIVE"){ setEffect(FX_NEGATIVE); }
    else if(name == "FX_SOLARISE"){ setEffect(FX_SOLARISE); }
    else if(name == "FX_SKETCH"){ setEffect(FX_SKETCH); }
    else if(name == "FX_DENOISE"){ setEffect(FX_DENOISE); }
    else if(name == "FX_EMBOS"){ setEffect(FX_EMBOS); }
    else if(name == "FX_OILPAINT"){ setEffect(FX_OILPAINT); }
    else if(name == "FX_HATCH"){ setEffect(FX_HATCH); }
    else if(name == "FX_GPEN"){ setEffect(FX_GPEN); }
    else if(name == "FX_PASTEL"){ setEffect(FX_PASTEL); }
    else if(name == "FX_WATERCOLOUR"){ setEffect(FX_WATERCOLOUR); }
    else if(name == "FX_FILM"){ setEffect(FX_FILM); }
    else if(name == "FX_BLUR"){ setEffect(FX_BLUR); }
    else if(name == "FX_SATURATION"){ setEffect(FX_SATURATION); }
    else if(name == "FX_COLOURSWAP"){ setEffect(FX_COLOURSWAP); }
    else if(name == "FX_WASHEDOUT"){ setEffect(FX_WASHEDOUT); }
    else if(name == "FX_POSTERISE"){ setEffect(FX_POSTERISE); }
    else if(name == "FX_COLOURPOINT"){ setEffect(FX_COLOURPOINT); }
    else if(name == "FX_COLOURBALANCE"){ setEffect(FX_COLOURBALANCE); }
    else if(name == "FX_CARTOON"){ setEffect(FX_CARTOON); }
    // BUG FIX: removed two trailing unreachable duplicate "FX_NONE" branches.
}
// Build the QHY5-specific settings GUI on top of the generic QCam GUI:
// a bands-denoise checkbox, gain sliders (global + hidden per-channel),
// an exposure slider with a fps/seconds readout, and a progress bar for
// long exposures. Also (re)starts the camera shot and the update/progress
// timers. Returns the top-level widget that hosts all the controls.
QWidget * QCamQHY5::buildGUI(QWidget * parent) {
   QWidget* remoteCTRL=QCam::buildGUI(parent);
   QCamVGroupBox* settingsBox=new QCamVGroupBox(QString("Settings"),remoteCTRL);
   QCamHBox* filtersBox=new QCamHBox(settingsBox);
   // denoise checkbox, initialized from the saved "QHY5_BANDS_FILTER" setting
   denoiseBox=new QCheckBox("Horizontal bands filtering",filtersBox);
   // read denoise settings ("yes", case-insensitive, enables the filter)
   if(settings.haveKey("QHY5_BANDS_FILTER")&&(strcasecmp(settings.getKey("QHY5_BANDS_FILTER"),"yes")==0)) {
      denoise_=TRUE;
      denoiseBox->setChecked(TRUE);
   } else {
      denoise_=FALSE;
      denoiseBox->setChecked(FALSE);
   }
   // gain sliders, range 0..81 (camera's non-linear gain scale)
   // NOTE(review): the global slider is initialized from gainG1_ — looks like
   // it should use the overall gain member instead; confirm against the class.
   gainSlider=new QCamSlider("Gain",false,settingsBox,0,81,false,false);
   gainSlider->setValue(gainG1_);
   gainSliderG1=new QCamSlider("Gain Green 1",false,settingsBox,0,81,false,false);
   gainSliderG1->setValue(gainG1_);
   gainSliderG2=new QCamSlider("Gain Green 2",false,settingsBox,0,81,false,false);
   gainSliderG2->setValue(gainG2_);
   gainSliderR=new QCamSlider("Gain Red",false,settingsBox,0,81,false,false);
   gainSliderR->setValue(gainR_);
   gainSliderB=new QCamSlider("Gain Blue",false,settingsBox,0,81,false,false);
   gainSliderB->setValue(gainB_);
   // per-channel gain is not finished at this time, so the sliders stay hidden
   gainSliderG1->hide();
   gainSliderG2->hide();
   gainSliderR->hide();
   gainSliderB->hide();
   // exposure slider: indexes into the discrete QHY5 exposure table
   QCamHBox* exposureBox=new QCamHBox(settingsBox);
   QLabel* label1=new QLabel(QString("Exposure"),exposureBox);
   exposureSlider=new QSlider(Qt::Horizontal,exposureBox);
   exposureSlider->setMinimum(0);
   exposureSlider->setMaximum(QHY5_EXPOSURE_TABLE_SIZE-1);
   exposureSlider->setValue(getExposureIndex(frameExposure_));
   exposureSlider->setTickPosition(QSlider::TicksBelow);
   exposureSlider->setTickInterval(1);
   exposureValue=new QLabel(exposureBox);
   exposureValue->setMinimumWidth(80);
   // update the readout: when the USB transfer time exceeds the exposure, the
   // transfer is the bottleneck, so show the max achievable frame rate instead
   int transferTime=1558*(height_+26)/PIXEL_RATE;
   if(transferTime>frameExposure_) {
      exposureValue->setText(QString().sprintf("%2i fps (max)",(int)(1.0/(float)transferTime*1000)));
   } else {
      // below 1 s show a frame rate, above 1 s show seconds
      if(frameExposure_<1000)
         exposureValue->setText(QString().sprintf("%2i fps",(int)(1.0/(float)frameExposure_*1000)));
      else
         exposureValue->setText(QString().sprintf("%2.1f s",((float)frameExposure_/1000)));
   }
   // shootMode_: short exposures (< 1000 ms) stream; longer ones are single
   // shots. poseTime subtracts the USB transfer time (rate ~24 M pixels/s)
   // from the exposure, clamped at 0.
   shootMode_=(frameExposure_<1000);
   int poseTime=frameExposure_-(1558*(height_+26)/PIXEL_RATE);
   if(poseTime<0) poseTime=0;
   // restart the camera with the computed pose time and shoot mode
   camera->stop();
   camera->shoot(poseTime,shootMode_);
   shooting_=TRUE;
   // progress bar: only enabled for exposures long enough to show progress
   QCamHBox* progressBox=new QCamHBox(settingsBox);
   QLabel* label3=new QLabel(QString("Progress"),progressBox);
   progressBar=new QProgressBar(progressBox);
   if(frameExposure_>(3*PROGRESS_TIME)) {
      progressBar->setEnabled(true);
      progressBar->setMinimum(0);
      progressBar->setMaximum((int)((float)(frameExposure_+30)/PROGRESS_TIME));
      progress_=0;
   } else {
      progressBar->setEnabled(false);
   }
   // tooltips
   denoiseBox->setToolTip(tr("Denoise the horizontal bands using the edge black pixels"));
   gainSlider->setToolTip(tr("Camera's gain, non linear, 0 to 81 , 5 means x1 (patterns after 52)"));
   exposureSlider->setToolTip(tr("Camera's exposure, may be limited by the frame sizes"));
   exposureValue->setToolTip(tr("Exposure time (not a real fps, not very accurate for high rates)"));
   // connections: live value changes update members, slider release commits
   // to the camera via setGain()/setExposure()
   connect(denoiseBox,SIGNAL(toggled(bool)),this,SLOT(denoiseChange(bool)));
   connect(gainSlider,SIGNAL(valueChange(int)),this,SLOT(changeGain(int)));
   connect(gainSlider,SIGNAL(sliderReleased()),this,SLOT(setGain()));
   connect(gainSliderG1,SIGNAL(valueChange(int)),this,SLOT(changeGainG1(int)));
   connect(gainSliderG1,SIGNAL(sliderReleased()),this,SLOT(setGain()));
   connect(gainSliderG2,SIGNAL(valueChange(int)),this,SLOT(changeGainG2(int)));
   connect(gainSliderG2,SIGNAL(sliderReleased()),this,SLOT(setGain()));
   connect(gainSliderR,SIGNAL(valueChange(int)),this,SLOT(changeGainR(int)));
   connect(gainSliderR,SIGNAL(sliderReleased()),this,SLOT(setGain()));
   connect(gainSliderB,SIGNAL(valueChange(int)),this,SLOT(changeGainB(int)));
   connect(gainSliderB,SIGNAL(sliderReleased()),this,SLOT(setGain()));
   connect(exposureSlider,SIGNAL(valueChanged(int)),this,SLOT(changeExposure(int)));
   connect(exposureSlider,SIGNAL(sliderReleased()),this,SLOT(setExposure()));
   cerr << "QHY5 ready..." << endl;
   // set the first timer shot: updateFrame() fires once after the exposure
   timer_=new QTimer(this);
   connect(timer_,SIGNAL(timeout()),this,SLOT(updateFrame()));
   timer_->setSingleShot(true);
   timer_->start(frameExposure_);
   // progress timer: periodic progressUpdate() ticks
   progressTimer_=new QTimer(this);
   progressTimer_->start(PROGRESS_TIME);
   connect(progressTimer_,SIGNAL(timeout()),this,SLOT(progressUpdate()));
   return remoteCTRL;
}
// Initialize a libdc1394 camera by GUID: acquire the camera handle, apply
// white balance / brightness / exposure from the config node (each either
// "auto" or a numeric value), fix the video mode (640x480 RGB8 @ 15 fps),
// and start ISO transmission and DMA capture. Errors abort via assert.
void DC1394Camera::init(core::ConfigNode config, uint64_t guid)
{
    LOGGER.infoStream() << guid << ": Initializing camera ";
    m_guid = guid;
    // Grab our camera handle from the shared library context
    m_camera = dc1394_camera_new(s_libHandle, guid);
    if (!m_camera)
    {
        LOGGER.errorStream() << "Failed to initialize camera with guid: " << guid;
        assert(m_camera && "Couldn't initialize camera");
    }
    // Determines settings and frame size (hard-coded mode and rate below)
    dc1394error_t err = DC1394_FAILURE;
    dc1394video_mode_t videoMode = DC1394_VIDEO_MODE_640x480_RGB8;
    dc1394framerate_t frameRate = DC1394_FRAMERATE_15;
    // Check for the whitebalance feature
    dc1394feature_info_t whiteBalance;
    whiteBalance.id = DC1394_FEATURE_WHITE_BALANCE;
    err = dc1394_feature_get(m_camera, &whiteBalance);
    assert(DC1394_SUCCESS == err && "Could not get white balance feature info");
    bool hasWhiteBalance = (whiteBalance.available == DC1394_TRUE);
    uint32_t uValue, vValue;
    if (hasWhiteBalance)
    {
        err = dc1394_feature_whitebalance_get_value(m_camera, &uValue, &vValue);
        LOGGER.infoStream() << m_guid << ": Current U: " << uValue << " V: " << vValue;
    }
    // Actually set the values if the user wants to: both "auto", or both numeric
    if (config.exists("uValue") && config.exists("vValue"))
    {
        bool uAuto = boost::to_lower_copy(config["uValue"].asString("auto")) == "auto";
        bool vAuto = boost::to_lower_copy(config["vValue"].asString("auto")) == "auto";
        bool autoVal = uAuto && vAuto;
        // Mixing one auto with one manual value is rejected outright
        if ((uAuto || vAuto) && !autoVal)
        {
            assert(false && "Both Whitebalance values must either be auto or manual");
        }
        if (autoVal)
        {
            setWhiteBalance(0, 0, true);
        }
        else
        {
            // Read in config values (U = blue channel, V = red channel,
            // per the u_b/v_r naming — TODO confirm against setWhiteBalance)
            uint32_t u_b_value = static_cast<uint32_t>(config["uValue"].asInt());
            uint32_t v_r_value = static_cast<uint32_t>(config["vValue"].asInt());
            // Set values
            setWhiteBalance(u_b_value, v_r_value);
        }
    }
    else if (config.exists("uValue") || config.exists("vValue"))
    {
        assert(false && "Must set both the U and V values for white balance");
    }
    // NOTE(review): this readback runs even when hasWhiteBalance is false —
    // presumably harmless on such cameras, but verify it cannot fail there.
    err = dc1394_feature_whitebalance_get_value(m_camera, &uValue, &vValue);
    LOGGER.infoStream() << m_guid << ": Set U: " << uValue << " V: " << vValue;
    if (config.exists("brightness"))
    {
        // Read in and set values ("auto" selects automatic mode)
        if (boost::to_lower_copy(config["brightness"].asString("auto")) == "auto")
        {
            setBrightness(0, true);
        }
        else
        {
            uint32_t value = static_cast<uint32_t>(config["brightness"].asInt());
            setBrightness(value);
        }
    }
    if (config.exists("exposure"))
    {
        // Read in and set values ("auto" selects automatic mode)
        if (boost::to_lower_copy(config["exposure"].asString("auto")) == "auto")
        {
            setExposure(0, true);
        }
        else
        {
            uint32_t value = static_cast<uint32_t>(config["exposure"].asInt());
            setExposure(value);
        }
    }
    // Grab image size implied by the chosen video mode
    err = dc1394_get_image_size_from_video_mode(m_camera, videoMode, &m_width, &m_height);
    assert(DC1394_SUCCESS == err && "Could not get image size");
    float fRate;
    err = dc1394_framerate_as_float(frameRate, &fRate);
    assert(DC1394_SUCCESS == err && "Could not get framerate as float");
    m_fps = fRate;
    // Setup the capture
    err = dc1394_video_set_iso_speed(m_camera, DC1394_ISO_SPEED_400);
    assert(DC1394_SUCCESS == err && "Could not set iso speed");
    err = dc1394_video_set_mode(m_camera, videoMode);
    assert(DC1394_SUCCESS == err && "Could not set video mode");
    err = dc1394_video_set_framerate(m_camera, frameRate);
    assert(DC1394_SUCCESS == err && "Could not set framerate");
    // Start data transfer
    // NOTE(review): libdc1394 examples call dc1394_capture_setup() BEFORE
    // dc1394_video_set_transmission(ON); here the order is reversed — confirm
    // this works on the target cameras or swap the two calls.
    err = dc1394_video_set_transmission(m_camera, DC1394_ON);
    assert(DC1394_SUCCESS == err && "Could not start camera iso transmission");
    err = dc1394_capture_setup(m_camera, DMA_BUFFER_SIZE, DC1394_CAPTURE_FLAGS_DEFAULT);
    assert(DC1394_SUCCESS == err && "Could not setup camera make sure"
           " that the video mode and framerate are supported by your camera");
}