/* GObject property setter for the aravissrc element.
 *
 * Property values are cached in the GstAravis struct; camera-affecting
 * ones (gain, exposure and their auto modes) are additionally forwarded
 * to the open camera so runtime changes take effect immediately.
 */
static void
gst_aravis_set_property (GObject * object, guint prop_id,
			 const GValue * value, GParamSpec * pspec)
{
	GstAravis *gst_aravis = GST_ARAVIS (object);

	switch (prop_id) {
		case PROP_CAMERA_NAME:
			/* Only allow switching cameras while no stream is active, so the
			 * other cached values keep describing the currently open device.
			 * FIX: the old name must be freed only when it is also replaced;
			 * the previous code freed it unconditionally, leaving a dangling
			 * pointer (read by GST_LOG_OBJECT below and freed again later)
			 * whenever a stream was already running. */
			if (gst_aravis->stream == NULL) {
				g_free (gst_aravis->camera_name);
				gst_aravis->camera_name = g_strdup (g_value_get_string (value));
				gst_aravis_init_camera (gst_aravis);
			}
			GST_LOG_OBJECT (gst_aravis, "Set camera name to %s", gst_aravis->camera_name);
			break;
		case PROP_GAIN:
			gst_aravis->gain = g_value_get_double (value);
			if (gst_aravis->camera != NULL)
				arv_camera_set_gain (gst_aravis->camera, gst_aravis->gain);
			break;
		case PROP_GAIN_AUTO:
			gst_aravis->gain_auto = g_value_get_boolean (value);
			if (gst_aravis->camera != NULL)
				arv_camera_set_gain_auto (gst_aravis->camera, gst_aravis->gain_auto);
			break;
		case PROP_EXPOSURE:
			gst_aravis->exposure_time_us = g_value_get_double (value);
			if (gst_aravis->camera != NULL)
				arv_camera_set_exposure_time (gst_aravis->camera, gst_aravis->exposure_time_us);
			break;
		case PROP_EXPOSURE_AUTO:
			gst_aravis->exposure_auto = g_value_get_boolean (value);
			if (gst_aravis->camera != NULL)
				arv_camera_set_exposure_time_auto (gst_aravis->camera, gst_aravis->exposure_auto);
			break;
		case PROP_OFFSET_X:
			/* ROI offsets/binning are applied at the next caps negotiation. */
			gst_aravis->offset_x = g_value_get_int (value);
			break;
		case PROP_OFFSET_Y:
			gst_aravis->offset_y = g_value_get_int (value);
			break;
		case PROP_H_BINNING:
			gst_aravis->h_binning = g_value_get_int (value);
			break;
		case PROP_V_BINNING:
			gst_aravis->v_binning = g_value_get_int (value);
			break;
		case PROP_PACKET_RESEND:
			gst_aravis->packet_resend = g_value_get_boolean (value);
			break;
		default:
			G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
			break;
	}
}
/*
 * CamGigE constructor: open the first available camera via Aravis,
 * configure ROI / exposure / gain / pixel format, create the stream and
 * pre-queue capture buffers.
 *
 * Throws a const char* message when no camera is found or when the
 * stream cannot be created.
 */
CamGigE::CamGigE(int id) : Cam(id)
{
	mWidth = XSIZE;
	mHeight = YSIZE;
	xshift = 0;
	yshift = 0;
	exposure = 2*1000;	// exposure time, in the units arv_camera_set_exposure_time expects
	gain = 300;
	isCapturing = false;
	framerate = 15.0f;
	mId = id;

	g_type_init ();	// deprecated no-op on GLib >= 2.36; kept for older GLib

	camera = arv_camera_new(NULL);	// NULL: open the first camera found
	if(camera == NULL) {
		throw "No camera found";
	}

	arv_camera_set_region(camera, xshift, yshift, mWidth, mHeight);
	arv_camera_set_exposure_time(camera, exposure);
	arv_camera_set_gain(camera, gain);
	arv_camera_set_pixel_format(camera, ARV_PIXEL_FORMAT_MONO_8);
	//arv_camera_set_pixel_format(camera, ARV_PIXEL_FORMAT_BAYER_BG_8);

	/* Read the region back: the device may clamp/align the request. */
	width = (int)mWidth;
	height = (int)mHeight;
	arv_camera_get_region (camera, &xshift, &yshift, &width, &height);
	payload = arv_camera_get_payload (camera);

	stream = arv_camera_create_stream (camera, NULL, NULL);
	if(stream == NULL) {
		/* FIX: release the camera before bailing out; the original code
		 * leaked the ArvCamera reference on this error path. */
		g_object_unref(camera);
		camera = NULL;
		throw "Cannot create stream";
	}

	/* Tolerate late/partial packets instead of dropping frames outright. */
	g_object_set(stream,
		     "packet-timeout", (unsigned) 20 * 1000,
		     "frame-retention", (unsigned) 100 * 1000,
		     NULL);

	/* Pre-queue a small pool of capture buffers sized to the payload. */
	for(int i = 0; i < 4; i++)
		arv_stream_push_buffer(stream, arv_buffer_new(payload, NULL));

	arv_camera_set_acquisition_mode(camera, ARV_ACQUISITION_MODE_CONTINUOUS);
	arv_camera_set_frame_rate(camera, framerate);

	rawdata = new unsigned char[width*height];	// one byte per pixel (MONO_8)
}
bool CvCaptureCAM_Aravis::setProperty( int property_id, double value ) { switch ( property_id ) { case CV_CAP_PROP_EXPOSURE: if(exposureAvailable) { /* exposure time in seconds, like 1/100 s */ value *= 1e6; // -> from s to us arv_camera_set_exposure_time(camera, exposure = BETWEEN(value, exposureMin, exposureMax)); break; } else return false; case CV_CAP_PROP_FPS: if(fpsAvailable) { arv_camera_set_frame_rate(camera, fps = BETWEEN(value, fpsMin, fpsMax)); break; } else return false; case CV_CAP_PROP_GAIN: if(gainAvailable) { arv_camera_set_gain(camera, gain = BETWEEN(value, gainMin, gainMax)); break; } else return false; case CV_CAP_PROP_FOURCC: { ArvPixelFormat newFormat = pixelFormat; switch((int)value) { case MODE_GRAY8: newFormat = ARV_PIXEL_FORMAT_MONO_8; break; case MODE_GRAY12: newFormat = ARV_PIXEL_FORMAT_MONO_12; break; } if(newFormat != pixelFormat) { stopCapture(); arv_camera_set_pixel_format(camera, pixelFormat = newFormat); startCapture(); } } break; default: return false; } return true; }
/* Caps negotiation for aravissrc (GStreamer 0.10 path).
 *
 * Stops acquisition, tears down the old stream, pushes the negotiated
 * geometry / pixel format / frame rate plus the cached gain & exposure
 * settings to the camera, rebuilds the stream with the new payload size
 * and restarts acquisition.  Returns TRUE (no failure paths reported).
 */
static gboolean
gst_aravis_set_caps (GstBaseSrc *src, GstCaps *caps)
{
	GstAravis* gst_aravis = GST_ARAVIS(src);
	GstStructure *structure;
	ArvPixelFormat pixel_format;
	int height, width;
	int bpp, depth;
	const GValue *frame_rate;
	const char *caps_string;
	unsigned int i;
	guint32 fourcc;

	GST_LOG_OBJECT (gst_aravis, "Requested caps = %" GST_PTR_FORMAT, caps);

	/* The device cannot be reconfigured while acquiring. */
	arv_camera_stop_acquisition (gst_aravis->camera);

	/* Drop the previous stream; a new one matching the new payload
	 * size is created further down. */
	if (gst_aravis->stream != NULL)
		g_object_unref (gst_aravis->stream);

	structure = gst_caps_get_structure (caps, 0);

	gst_structure_get_int (structure, "width", &width);
	gst_structure_get_int (structure, "height", &height);
	frame_rate = gst_structure_get_value (structure, "framerate");
	gst_structure_get_int (structure, "bpp", &bpp);
	gst_structure_get_int (structure, "depth", &depth);

	/* The "format" field may arrive either as a string or as a fourcc
	 * value depending on the upstream caps producer. */
	if (gst_structure_get_field_type (structure, "format") == G_TYPE_STRING) {
		const char *string;

		string = gst_structure_get_string (structure, "format");
		fourcc = GST_STR_FOURCC (string);
	} else if (gst_structure_get_field_type (structure, "format") == GST_TYPE_FOURCC) {
		gst_structure_get_fourcc (structure, "format", &fourcc);
	} else
		fourcc = 0;

	pixel_format = arv_pixel_format_from_gst_0_10_caps (gst_structure_get_name (structure), bpp, depth, fourcc);

	/* Push the negotiated geometry/format to the device, together with
	 * the ROI offsets and binning cached by the property setter. */
	arv_camera_set_region (gst_aravis->camera, gst_aravis->offset_x, gst_aravis->offset_y, width, height);
	arv_camera_set_binning (gst_aravis->camera, gst_aravis->h_binning, gst_aravis->v_binning);
	arv_camera_set_pixel_format (gst_aravis->camera, pixel_format);

	if (frame_rate != NULL) {
		double dbl_frame_rate;

		dbl_frame_rate = (double) gst_value_get_fraction_numerator (frame_rate) /
			(double) gst_value_get_fraction_denominator (frame_rate);

		GST_DEBUG_OBJECT (gst_aravis, "Frame rate = %g Hz", dbl_frame_rate);
		arv_camera_set_frame_rate (gst_aravis->camera, dbl_frame_rate);

		/* Allow at least three frame periods before timing out a buffer. */
		if (dbl_frame_rate > 0.0)
			gst_aravis->buffer_timeout_us = MAX (GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT,
							     3e6 / dbl_frame_rate);
		else
			gst_aravis->buffer_timeout_us = GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT;
	} else
		gst_aravis->buffer_timeout_us = GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT;

	GST_DEBUG_OBJECT (gst_aravis, "Buffer timeout = %" G_GUINT64_FORMAT " µs", gst_aravis->buffer_timeout_us);

	GST_DEBUG_OBJECT (gst_aravis, "Actual frame rate = %g Hz", arv_camera_get_frame_rate (gst_aravis->camera));

	/* Gain: continuous auto mode, or a fixed manual value when gain >= 0. */
	if(gst_aravis->gain_auto) {
		arv_camera_set_gain_auto (gst_aravis->camera, ARV_AUTO_CONTINUOUS);
		GST_DEBUG_OBJECT (gst_aravis, "Auto Gain = continuous");
	} else {
		if (gst_aravis->gain >= 0) {
			GST_DEBUG_OBJECT (gst_aravis, "Gain = %g", gst_aravis->gain);
			arv_camera_set_gain_auto (gst_aravis->camera, ARV_AUTO_OFF);
			arv_camera_set_gain (gst_aravis->camera, gst_aravis->gain);
		}
		GST_DEBUG_OBJECT (gst_aravis, "Actual gain = %g", arv_camera_get_gain (gst_aravis->camera));
	}

	/* Exposure: continuous auto mode, or a fixed time when positive. */
	if(gst_aravis->exposure_auto) {
		arv_camera_set_exposure_time_auto (gst_aravis->camera, ARV_AUTO_CONTINUOUS);
		GST_DEBUG_OBJECT (gst_aravis, "Auto Exposure = continuous");
	} else {
		if (gst_aravis->exposure_time_us > 0.0) {
			GST_DEBUG_OBJECT (gst_aravis, "Exposure = %g µs", gst_aravis->exposure_time_us);
			arv_camera_set_exposure_time_auto (gst_aravis->camera, ARV_AUTO_OFF);
			arv_camera_set_exposure_time (gst_aravis->camera, gst_aravis->exposure_time_us);
		}
		GST_DEBUG_OBJECT (gst_aravis, "Actual exposure = %g µs", arv_camera_get_exposure_time (gst_aravis->camera));
	}

	/* Cache the fully-specified caps for later use by gst_base_src. */
	if (gst_aravis->fixed_caps != NULL)
		gst_caps_unref (gst_aravis->fixed_caps);

	caps_string = arv_pixel_format_to_gst_0_10_caps_string (pixel_format);
	if (caps_string != NULL) {
		GstStructure *structure;
		GstCaps *caps;

		caps = gst_caps_new_empty ();
		structure = gst_structure_from_string (caps_string, NULL);
		gst_structure_set (structure,
				   "width", G_TYPE_INT, width,
				   "height", G_TYPE_INT, height,
				   NULL);
		if (frame_rate != NULL)
			gst_structure_set_value (structure, "framerate", frame_rate);
		gst_caps_append_structure (caps, structure);
		gst_aravis->fixed_caps = caps;
	} else
		gst_aravis->fixed_caps = NULL;

	gst_aravis->payload = arv_camera_get_payload (gst_aravis->camera);
	gst_aravis->stream = arv_camera_create_stream (gst_aravis->camera, NULL, NULL);

	/* Packet resend is a GigE Vision stream feature only. */
	if (ARV_IS_GV_STREAM (gst_aravis->stream) && gst_aravis->packet_resend)
		g_object_set (gst_aravis->stream, "packet-resend", ARV_GV_STREAM_PACKET_RESEND_ALWAYS, NULL);
	else
		g_object_set (gst_aravis->stream, "packet-resend", ARV_GV_STREAM_PACKET_RESEND_NEVER, NULL);

	/* Pre-queue the buffer pool sized for the new payload. */
	for (i = 0; i < GST_ARAVIS_N_BUFFERS; i++)
		arv_stream_push_buffer (gst_aravis->stream,
					arv_buffer_new (gst_aravis->payload, NULL));

	GST_LOG_OBJECT (gst_aravis, "Start acquisition");
	arv_camera_start_acquisition (gst_aravis->camera);

	gst_aravis->timestamp_offset = 0;
	gst_aravis->last_timestamp = 0;

	return TRUE;
}
// Set a capture property (CV_CAP_PROP_*) on the Aravis-backed capture.
// Returns true when handled, false for unknown properties or features the
// camera does not provide.  Exposure is given in seconds (OpenCV
// convention) and converted to microseconds for Aravis.
bool CvCaptureCAM_Aravis::setProperty( int property_id, double value )
{
    switch(property_id) {
        case CV_CAP_PROP_AUTO_EXPOSURE:
            // Toggle the software auto-exposure loop; when enabling, seed it
            // with the camera's current exposure & gain as starting points.
            if(exposureAvailable || gainAvailable) {
                if( (controlExposure = (bool)(int)value) ) {
                    exposure = exposureAvailable ? arv_camera_get_exposure_time(camera) : 0;
                    gain = gainAvailable ? arv_camera_get_gain(camera) : 0;
                }
            }
            break;

        case CV_CAP_PROP_BRIGHTNESS:
            // Exposure compensation in stops, clipped to [-3, 3]; consumed
            // by autoExposureControl().
            exposureCompensation = CLIP(value, -3., 3.);
            break;

        case CV_CAP_PROP_EXPOSURE:
            if(exposureAvailable) {
                /* exposure time in seconds, like 1/100 s */
                value *= 1e6; // -> from s to us
                arv_camera_set_exposure_time(camera, exposure = CLIP(value, exposureMin, exposureMax));
                break;
            } else
                return false;

        case CV_CAP_PROP_FPS:
            if(fpsAvailable) {
                arv_camera_set_frame_rate(camera, fps = CLIP(value, fpsMin, fpsMax));
                break;
            } else
                return false;

        case CV_CAP_PROP_GAIN:
            if(gainAvailable) {
                // value == -1 selects automatic gain; any other value sets
                // a fixed manual gain clipped to the device's range.
                if ( (autoGain = (-1 == value) ) )
                    break;

                arv_camera_set_gain(camera, gain = CLIP(value, gainMin, gainMax));
                break;
            } else
                return false;

        case CV_CAP_PROP_FOURCC:
            {
                // Select the pixel format; targetGrey is the mid-scale
                // brightness target the auto-exposure loop aims for at the
                // chosen bit depth.
                ArvPixelFormat newFormat = pixelFormat;
                switch((int)value) {
                    case MODE_GREY:
                    case MODE_Y800:
                        newFormat = ARV_PIXEL_FORMAT_MONO_8;
                        targetGrey = 128;
                        break;
                    case MODE_Y12:
                        newFormat = ARV_PIXEL_FORMAT_MONO_12;
                        targetGrey = 2048;
                        break;
                    case MODE_Y16:
                        newFormat = ARV_PIXEL_FORMAT_MONO_16;
                        targetGrey = 32768;
                        break;
                    case MODE_GRBG:
                        newFormat = ARV_PIXEL_FORMAT_BAYER_GR_8;
                        targetGrey = 128;
                        break;
                }
                if(newFormat != pixelFormat) {
                    // Changing the format requires restarting the stream.
                    stopCapture();
                    arv_camera_set_pixel_format(camera, pixelFormat = newFormat);
                    startCapture();
                }
            }
            break;

        case CV_CAP_PROP_BUFFERSIZE:
            {
                int x = (int)value;
                if((x > 0) && (x != num_buffers)) {
                    // Resize the acquisition buffer pool; needs a stream restart.
                    stopCapture();
                    num_buffers = x;
                    startCapture();
                }
            }
            break;

        default:
            return false;
    }

    return true;
}
// Software auto-exposure/auto-gain loop, run on each captured frame.
// Compares the frame's mean brightness against targetGrey and nudges the
// camera's exposure time and/or gain towards it.
void CvCaptureCAM_Aravis::autoExposureControl(IplImage* image)
{
    // Software control of exposure parameters utilizing
    // automatic change of exposure time & gain

    // Priority is set as follows:
    // - to increase brightness, first increase time then gain
    // - to decrease brightness, first decrease gain then time

    cv::Mat m = cv::cvarrToMat(image);

    // calc mean value for luminance or green channel
    double brightness = cv::mean(m)[image->nChannels > 1 ? 1 : 0];
    if(brightness < 1) brightness = 1;   // avoid division by zero below

    // mid point - 100 % means no change
    static const double dmid = 100;

    // distance from optimal value as a percentage
    double d = (targetGrey * dmid) / brightness;
    if(d >= dmid) d = ( d + (dmid * 2) ) / 3;   // damp large upward steps

    prevFrameID = frameID;
    midGrey = brightness;

    // Upper bound on exposure: one frame period (µs) at the current fps.
    double maxe = 1e6 / fps;
    // Candidate exposure scaled by the brightness error and the user's
    // exposure compensation (in stops), clipped to the legal range.
    double ne = CLIP( ( exposure * d ) / ( dmid * pow(sqrt(2), -2 * exposureCompensation) ), exposureMin, maxe);

    // if change of value requires intervention
    if(std::fabs(d-dmid) > 5) {
        double ev, ng = 0;

        if(gainAvailable && autoGain) {
            // brightness error expressed in EV stops
            ev = log( d / dmid ) / log(2);
            ng = CLIP( gain + ev + exposureCompensation, gainMin, gainMax);

            if( ng < gain ) {
                // priority 1 - reduce gain
                arv_camera_set_gain(camera, (gain = ng));
                return;
            }
        }

        if(exposureAvailable) {
            // priority 2 - control of exposure time
            if(std::fabs(exposure - ne) > 2) {
                // we have not yet reach the max-e level
                arv_camera_set_exposure_time(camera, (exposure = ne) );
                return;
            }
        }

        if(gainAvailable && autoGain) {
            if(exposureAvailable) {
                // exposure at maximum - increase gain if possible
                if(ng > gain && ng < gainMax && ne >= maxe) {
                    arv_camera_set_gain(camera, (gain = ng));
                    return;
                }
            } else {
                // priority 3 - increase gain
                arv_camera_set_gain(camera, (gain = ng));
                return;
            }
        }
    }

    // if gain can be reduced - do it
    if(gainAvailable && autoGain && exposureAvailable) {
        if(gain > gainMin && exposure < maxe) {
            // trade a little extra exposure time for lower gain
            exposure = CLIP( ne * 1.05, exposureMin, maxe);
            arv_camera_set_exposure_time(camera, exposure );
        }
    }
}
/* Caps negotiation for aravissrc: push the negotiated geometry, pixel
 * format and frame rate plus the cached gain & exposure values to the
 * camera, rebuild the Aravis stream with the matching payload size and
 * restart acquisition.  Returns TRUE (no failure paths reported).
 *
 * FIX: the buffer-timeout debug message used the non-portable glibc
 * "%Ld" length modifier with a guint64 argument; it now uses
 * G_GUINT64_FORMAT like the rest of the code base.
 */
static gboolean
gst_aravis_set_caps (GstBaseSrc *src, GstCaps *caps)
{
	GstAravis* gst_aravis = GST_ARAVIS(src);
	GstStructure *structure;
	ArvPixelFormat pixel_format;
	int height, width;
	int bpp, depth;
	const GValue *frame_rate;
	const char *caps_string;
	unsigned int i;
	guint32 fourcc;

	GST_LOG_OBJECT (gst_aravis, "Requested caps = %" GST_PTR_FORMAT, caps);

	/* The device cannot be reconfigured while acquiring. */
	arv_camera_stop_acquisition (gst_aravis->camera);

	/* A new stream matching the new payload size is created below. */
	if (gst_aravis->stream != NULL)
		g_object_unref (gst_aravis->stream);

	structure = gst_caps_get_structure (caps, 0);

	gst_structure_get_int (structure, "width", &width);
	gst_structure_get_int (structure, "height", &height);
	frame_rate = gst_structure_get_value (structure, "framerate");
	gst_structure_get_fourcc (structure, "format", &fourcc);
	gst_structure_get_int (structure, "bpp", &bpp);
	gst_structure_get_int (structure, "depth", &depth);

	pixel_format = arv_pixel_format_from_gst_caps (gst_structure_get_name (structure), bpp, depth, fourcc);

	/* Push the negotiated geometry/format to the device. */
	arv_camera_set_region (gst_aravis->camera, 0, 0, width, height);
	arv_camera_set_binning (gst_aravis->camera, gst_aravis->h_binning, gst_aravis->v_binning);
	arv_camera_set_pixel_format (gst_aravis->camera, pixel_format);

	if (frame_rate != NULL) {
		double dbl_frame_rate;

		dbl_frame_rate = (double) gst_value_get_fraction_numerator (frame_rate) /
			(double) gst_value_get_fraction_denominator (frame_rate);

		GST_DEBUG_OBJECT (gst_aravis, "Frame rate = %g Hz", dbl_frame_rate);
		arv_camera_set_frame_rate (gst_aravis->camera, dbl_frame_rate);

		/* Allow at least three frame periods before a buffer times out. */
		if (dbl_frame_rate > 0.0)
			gst_aravis->buffer_timeout_us = MAX (GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT,
							     3e6 / dbl_frame_rate);
		else
			gst_aravis->buffer_timeout_us = GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT;
	} else
		gst_aravis->buffer_timeout_us = GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT;

	GST_DEBUG_OBJECT (gst_aravis, "Buffer timeout = %" G_GUINT64_FORMAT " µs",
			  gst_aravis->buffer_timeout_us);

	GST_DEBUG_OBJECT (gst_aravis, "Actual frame rate = %g Hz",
			  arv_camera_get_frame_rate (gst_aravis->camera));

	GST_DEBUG_OBJECT (gst_aravis, "Gain = %d", gst_aravis->gain);
	arv_camera_set_gain (gst_aravis->camera, gst_aravis->gain);
	GST_DEBUG_OBJECT (gst_aravis, "Actual gain = %d", arv_camera_get_gain (gst_aravis->camera));

	GST_DEBUG_OBJECT (gst_aravis, "Exposure = %g µs", gst_aravis->exposure_time_us);
	arv_camera_set_exposure_time (gst_aravis->camera, gst_aravis->exposure_time_us);
	GST_DEBUG_OBJECT (gst_aravis, "Actual exposure = %g µs",
			  arv_camera_get_exposure_time (gst_aravis->camera));

	/* Cache the fully-specified caps for later use by gst_base_src. */
	if (gst_aravis->fixed_caps != NULL)
		gst_caps_unref (gst_aravis->fixed_caps);

	caps_string = arv_pixel_format_to_gst_caps_string (pixel_format);
	if (caps_string != NULL) {
		GstStructure *structure;
		GstCaps *caps;

		caps = gst_caps_new_empty ();
		structure = gst_structure_from_string (caps_string, NULL);
		gst_structure_set (structure,
				   "width", G_TYPE_INT, width,
				   "height", G_TYPE_INT, height,
				   NULL);
		if (frame_rate != NULL)
			gst_structure_set_value (structure, "framerate", frame_rate);
		gst_caps_append_structure (caps, structure);
		gst_aravis->fixed_caps = caps;
	} else
		gst_aravis->fixed_caps = NULL;

	gst_aravis->payload = arv_camera_get_payload (gst_aravis->camera);
	gst_aravis->stream = arv_camera_create_stream (gst_aravis->camera, NULL, NULL);

	/* Pre-queue the buffer pool sized for the new payload. */
	for (i = 0; i < GST_ARAVIS_N_BUFFERS; i++)
		arv_stream_push_buffer (gst_aravis->stream,
					arv_buffer_new (gst_aravis->payload, NULL));

	GST_LOG_OBJECT (gst_aravis, "Start acquisition");
	arv_camera_start_acquisition (gst_aravis->camera);

	gst_aravis->timestamp_offset = 0;
	gst_aravis->last_timestamp = 0;

	return TRUE;
}
int main(int argc, char** argv) { char *pszGuid = NULL; char szGuid[512]; int nInterfaces = 0; int nDevices = 0; int i = 0; const char *pkeyAcquisitionFrameRate[2] = {"AcquisitionFrameRate", "AcquisitionFrameRateAbs"}; ArvGcNode *pGcNode; GError *error=NULL; global.bCancel = FALSE; global.config = global.config.__getDefault__(); global.idSoftwareTriggerTimer = 0; ros::init(argc, argv, "camera"); global.phNode = new ros::NodeHandle(); // Service callback for firing nuc's. // Needed since we cannot open another connection to cameras while streaming. ros::NodeHandle nh; ros::ServiceServer NUCservice = nh.advertiseService("FireNUC", NUCService_callback); //g_type_init (); // Print out some useful info. ROS_INFO ("Attached cameras:"); arv_update_device_list(); nInterfaces = arv_get_n_interfaces(); ROS_INFO ("# Interfaces: %d", nInterfaces); nDevices = arv_get_n_devices(); ROS_INFO ("# Devices: %d", nDevices); for (i=0; i<nDevices; i++) ROS_INFO ("Device%d: %s", i, arv_get_device_id(i)); if (nDevices>0) { // Get the camera guid from either the command-line or as a parameter. if (argc==2) { strcpy(szGuid, argv[1]); pszGuid = szGuid; } else { if (global.phNode->hasParam(ros::this_node::getName()+"/guid")) { std::string stGuid; global.phNode->getParam(ros::this_node::getName()+"/guid", stGuid); strcpy (szGuid, stGuid.c_str()); pszGuid = szGuid; } else pszGuid = NULL; } // Open the camera, and set it up. ROS_INFO("Opening: %s", pszGuid ? pszGuid : "(any)"); while (TRUE) { global.pCamera = arv_camera_new(pszGuid); if (global.pCamera) break; else { ROS_WARN ("Could not open camera %s. Retrying...", pszGuid); ros::Duration(1.0).sleep(); ros::spinOnce(); } } global.pDevice = arv_camera_get_device(global.pCamera); ROS_INFO("Opened: %s-%s", arv_device_get_string_feature_value (global.pDevice, "DeviceVendorName"), arv_device_get_string_feature_value (global.pDevice, "DeviceID")); // See if some basic camera features exist. 
pGcNode = arv_device_get_feature (global.pDevice, "AcquisitionMode"); global.isImplementedAcquisitionMode = ARV_GC_FEATURE_NODE (pGcNode) ? arv_gc_feature_node_is_implemented (ARV_GC_FEATURE_NODE (pGcNode), &error) : FALSE; pGcNode = arv_device_get_feature (global.pDevice, "GainRaw"); global.isImplementedGain = ARV_GC_FEATURE_NODE (pGcNode) ? arv_gc_feature_node_is_implemented (ARV_GC_FEATURE_NODE (pGcNode), &error) : FALSE; pGcNode = arv_device_get_feature (global.pDevice, "Gain"); global.isImplementedGain |= ARV_GC_FEATURE_NODE (pGcNode) ? arv_gc_feature_node_is_implemented (ARV_GC_FEATURE_NODE (pGcNode), &error) : FALSE; pGcNode = arv_device_get_feature (global.pDevice, "ExposureTimeAbs"); global.isImplementedExposureTimeAbs = ARV_GC_FEATURE_NODE (pGcNode) ? arv_gc_feature_node_is_implemented (ARV_GC_FEATURE_NODE (pGcNode), &error) : FALSE; pGcNode = arv_device_get_feature (global.pDevice, "ExposureAuto"); global.isImplementedExposureAuto = ARV_GC_FEATURE_NODE (pGcNode) ? arv_gc_feature_node_is_implemented (ARV_GC_FEATURE_NODE (pGcNode), &error) : FALSE; pGcNode = arv_device_get_feature (global.pDevice, "GainAuto"); global.isImplementedGainAuto = ARV_GC_FEATURE_NODE (pGcNode) ? arv_gc_feature_node_is_implemented (ARV_GC_FEATURE_NODE (pGcNode), &error) : FALSE; pGcNode = arv_device_get_feature (global.pDevice, "TriggerSelector"); global.isImplementedTriggerSelector = ARV_GC_FEATURE_NODE (pGcNode) ? arv_gc_feature_node_is_implemented (ARV_GC_FEATURE_NODE (pGcNode), &error) : FALSE; pGcNode = arv_device_get_feature (global.pDevice, "TriggerSource"); global.isImplementedTriggerSource = ARV_GC_FEATURE_NODE (pGcNode) ? arv_gc_feature_node_is_implemented (ARV_GC_FEATURE_NODE (pGcNode), &error) : FALSE; pGcNode = arv_device_get_feature (global.pDevice, "TriggerMode"); global.isImplementedTriggerMode = ARV_GC_FEATURE_NODE (pGcNode) ? 
arv_gc_feature_node_is_implemented (ARV_GC_FEATURE_NODE (pGcNode), &error) : FALSE; pGcNode = arv_device_get_feature (global.pDevice, "FocusPos"); global.isImplementedFocusPos = ARV_GC_FEATURE_NODE (pGcNode) ? arv_gc_feature_node_is_implemented (ARV_GC_FEATURE_NODE (pGcNode), &error) : FALSE; pGcNode = arv_device_get_feature (global.pDevice, "GevSCPSPacketSize"); global.isImplementedMtu = ARV_GC_FEATURE_NODE (pGcNode) ? arv_gc_feature_node_is_implemented (ARV_GC_FEATURE_NODE (pGcNode), &error) : FALSE; pGcNode = arv_device_get_feature (global.pDevice, "AcquisitionFrameRateEnable"); global.isImplementedAcquisitionFrameRateEnable = ARV_GC_FEATURE_NODE (pGcNode) ? arv_gc_feature_node_is_implemented (ARV_GC_FEATURE_NODE (pGcNode), &error) : FALSE; // Find the key name for framerate. global.keyAcquisitionFrameRate = NULL; for (i=0; i<2; i++) { pGcNode = arv_device_get_feature (global.pDevice, pkeyAcquisitionFrameRate[i]); global.isImplementedAcquisitionFrameRate = pGcNode ? arv_gc_feature_node_is_implemented (ARV_GC_FEATURE_NODE (pGcNode), &error) : FALSE; if (global.isImplementedAcquisitionFrameRate) { global.keyAcquisitionFrameRate = pkeyAcquisitionFrameRate[i]; break; } } // Get parameter bounds. 
arv_camera_get_exposure_time_bounds (global.pCamera, &global.configMin.ExposureTimeAbs, &global.configMax.ExposureTimeAbs); arv_camera_get_gain_bounds (global.pCamera, &global.configMin.Gain, &global.configMax.Gain); arv_camera_get_sensor_size (global.pCamera, &global.widthSensor, &global.heightSensor); arv_camera_get_width_bounds (global.pCamera, &global.widthRoiMin, &global.widthRoiMax); arv_camera_get_height_bounds (global.pCamera, &global.heightRoiMin, &global.heightRoiMax); if (global.isImplementedFocusPos) { gint64 focusMin64, focusMax64; arv_device_get_integer_feature_bounds (global.pDevice, "FocusPos", &focusMin64, &focusMax64); global.configMin.FocusPos = focusMin64; global.configMax.FocusPos = focusMax64; } else { global.configMin.FocusPos = 0; global.configMax.FocusPos = 0; } global.configMin.AcquisitionFrameRate = 0.0; global.configMax.AcquisitionFrameRate = 1000.0; // Initial camera settings. if (global.isImplementedExposureTimeAbs) arv_device_set_float_feature_value(global.pDevice, "ExposureTimeAbs", global.config.ExposureTimeAbs); if (global.isImplementedGain) arv_camera_set_gain(global.pCamera, global.config.Gain); //arv_device_set_integer_feature_value(global.pDevice, "GainRaw", global.config.GainRaw); if (global.isImplementedAcquisitionFrameRateEnable) arv_device_set_integer_feature_value(global.pDevice, "AcquisitionFrameRateEnable", 1); if (global.isImplementedAcquisitionFrameRate) arv_device_set_float_feature_value(global.pDevice, global.keyAcquisitionFrameRate, global.config.AcquisitionFrameRate); // Set up the triggering. 
if (global.isImplementedTriggerMode) { if (global.isImplementedTriggerSelector && global.isImplementedTriggerMode) { arv_device_set_string_feature_value(global.pDevice, "TriggerSelector", "AcquisitionStart"); arv_device_set_string_feature_value(global.pDevice, "TriggerMode", "Off"); arv_device_set_string_feature_value(global.pDevice, "TriggerSelector", "FrameStart"); arv_device_set_string_feature_value(global.pDevice, "TriggerMode", "Off"); } } WriteCameraFeaturesFromRosparam (); #ifdef TUNING ros::Publisher pubInt64 = global.phNode->advertise<std_msgs::Int64>(ros::this_node::getName()+"/dt", 100); global.ppubInt64 = &pubInt64; #endif // Grab the calibration file url from the param server if exists std::string calibrationURL = ""; // Default looks in .ros/camera_info if (!(ros::param::get(std::string("calibrationURL").append(arv_device_get_string_feature_value (global.pDevice, "DeviceID")), calibrationURL))) { ROS_ERROR("ERROR: Could not read calibrationURL from parameter server"); } // Start the camerainfo manager. global.pCameraInfoManager = new camera_info_manager::CameraInfoManager(ros::NodeHandle(ros::this_node::getName()), arv_device_get_string_feature_value (global.pDevice, "DeviceID"), calibrationURL); // Start the dynamic_reconfigure server. dynamic_reconfigure::Server<Config> reconfigureServer; dynamic_reconfigure::Server<Config>::CallbackType reconfigureCallback; reconfigureCallback = boost::bind(&RosReconfigure_callback, _1, _2); reconfigureServer.setCallback(reconfigureCallback); ros::Duration(2.0).sleep(); // Get parameter current values. global.xRoi=0; global.yRoi=0; global.widthRoi=0; global.heightRoi=0; arv_camera_get_region (global.pCamera, &global.xRoi, &global.yRoi, &global.widthRoi, &global.heightRoi); global.config.ExposureTimeAbs = global.isImplementedExposureTimeAbs ? arv_device_get_float_feature_value (global.pDevice, "ExposureTimeAbs") : 0; global.config.Gain = global.isImplementedGain ? 
arv_camera_get_gain (global.pCamera) : 0.0; global.pszPixelformat = g_string_ascii_down(g_string_new(arv_device_get_string_feature_value(global.pDevice, "PixelFormat")))->str; global.nBytesPixel = ARV_PIXEL_FORMAT_BYTE_PER_PIXEL(arv_device_get_integer_feature_value(global.pDevice, "PixelFormat")); global.config.FocusPos = global.isImplementedFocusPos ? arv_device_get_integer_feature_value (global.pDevice, "FocusPos") : 0; // Print information. ROS_INFO (" Using Camera Configuration:"); ROS_INFO (" ---------------------------"); ROS_INFO (" Vendor name = %s", arv_device_get_string_feature_value (global.pDevice, "DeviceVendorName")); ROS_INFO (" Model name = %s", arv_device_get_string_feature_value (global.pDevice, "DeviceModelName")); ROS_INFO (" Device id = %s", arv_device_get_string_feature_value (global.pDevice, "DeviceID")); ROS_INFO (" Sensor width = %d", global.widthSensor); ROS_INFO (" Sensor height = %d", global.heightSensor); ROS_INFO (" ROI x,y,w,h = %d, %d, %d, %d", global.xRoi, global.yRoi, global.widthRoi, global.heightRoi); ROS_INFO (" Pixel format = %s", global.pszPixelformat); ROS_INFO (" BytesPerPixel = %d", global.nBytesPixel); ROS_INFO (" Acquisition Mode = %s", global.isImplementedAcquisitionMode ? arv_device_get_string_feature_value (global.pDevice, "AcquisitionMode") : "(not implemented in camera)"); ROS_INFO (" Trigger Mode = %s", global.isImplementedTriggerMode ? arv_device_get_string_feature_value (global.pDevice, "TriggerMode") : "(not implemented in camera)"); ROS_INFO (" Trigger Source = %s", global.isImplementedTriggerSource ? arv_device_get_string_feature_value(global.pDevice, "TriggerSource") : "(not implemented in camera)"); ROS_INFO (" Can set FrameRate: %s", global.isImplementedAcquisitionFrameRate ? 
"True" : "False"); if (global.isImplementedAcquisitionFrameRate) { global.config.AcquisitionFrameRate = arv_device_get_float_feature_value (global.pDevice, global.keyAcquisitionFrameRate); ROS_INFO (" AcquisitionFrameRate = %g hz", global.config.AcquisitionFrameRate); } ROS_INFO (" Can set Exposure: %s", global.isImplementedExposureTimeAbs ? "True" : "False"); if (global.isImplementedExposureTimeAbs) { ROS_INFO (" Can set ExposureAuto: %s", global.isImplementedExposureAuto ? "True" : "False"); ROS_INFO (" Exposure = %g us in range [%g,%g]", global.config.ExposureTimeAbs, global.configMin.ExposureTimeAbs, global.configMax.ExposureTimeAbs); } ROS_INFO (" Can set Gain: %s", global.isImplementedGain ? "True" : "False"); if (global.isImplementedGain) { ROS_INFO (" Can set GainAuto: %s", global.isImplementedGainAuto ? "True" : "False"); ROS_INFO (" Gain = %f %% in range [%f,%f]", global.config.Gain, global.configMin.Gain, global.configMax.Gain); } ROS_INFO (" Can set FocusPos: %s", global.isImplementedFocusPos ? "True" : "False"); if (global.isImplementedMtu) ROS_INFO (" Network mtu = %lu", arv_device_get_integer_feature_value(global.pDevice, "GevSCPSPacketSize")); ROS_INFO (" ---------------------------"); // // Print the tree of camera features, with their values. // ROS_INFO (" ----------------------------------------------------------------------------------"); // NODEEX nodeex; // ArvGc *pGenicam=0; // pGenicam = arv_device_get_genicam(global.pDevice); // // nodeex.szName = "Root"; // nodeex.pNode = (ArvDomNode *)arv_gc_get_node(pGenicam, nodeex.szName); // nodeex.pNodeSibling = NULL; // PrintDOMTree(pGenicam, nodeex, 0); // ROS_INFO (" ----------------------------------------------------------------------------------"); ArvGvStream *pStream = NULL; while (TRUE) { pStream = CreateStream(); if (pStream) break; else { ROS_WARN("Could not create image stream for %s. 
Retrying...", pszGuid); ros::Duration(1.0).sleep(); ros::spinOnce(); } } ApplicationData applicationdata; applicationdata.nBuffers=0; applicationdata.main_loop = 0; // Set up image_raw. image_transport::ImageTransport *pTransport = new image_transport::ImageTransport(*global.phNode); global.publisher = pTransport->advertiseCamera(ros::this_node::getName()+"/image_raw", 1); // Connect signals with callbacks. g_signal_connect (pStream, "new-buffer", G_CALLBACK (NewBuffer_callback), &applicationdata); g_signal_connect (global.pDevice, "control-lost", G_CALLBACK (ControlLost_callback), NULL); g_timeout_add_seconds (1, PeriodicTask_callback, &applicationdata); arv_stream_set_emit_signals ((ArvStream *)pStream, TRUE); void (*pSigintHandlerOld)(int); pSigintHandlerOld = signal (SIGINT, set_cancel); arv_device_execute_command (global.pDevice, "AcquisitionStart"); applicationdata.main_loop = g_main_loop_new (NULL, FALSE); g_main_loop_run (applicationdata.main_loop); if (global.idSoftwareTriggerTimer) { g_source_remove(global.idSoftwareTriggerTimer); global.idSoftwareTriggerTimer = 0; } signal (SIGINT, pSigintHandlerOld); g_main_loop_unref (applicationdata.main_loop); guint64 n_completed_buffers; guint64 n_failures; guint64 n_underruns; guint64 n_resent; guint64 n_missing; arv_stream_get_statistics ((ArvStream *)pStream, &n_completed_buffers, &n_failures, &n_underruns); ROS_INFO ("Completed buffers = %Lu", (unsigned long long) n_completed_buffers); ROS_INFO ("Failures = %Lu", (unsigned long long) n_failures); ROS_INFO ("Underruns = %Lu", (unsigned long long) n_underruns); arv_gv_stream_get_statistics (pStream, &n_resent, &n_missing); ROS_INFO ("Resent buffers = %Lu", (unsigned long long) n_resent); ROS_INFO ("Missing = %Lu", (unsigned long long) n_missing); arv_device_execute_command (global.pDevice, "AcquisitionStop"); g_object_unref (pStream); } else ROS_ERROR ("No cameras detected."); delete global.phNode; return 0; } // main()
/// dynamic_reconfigure callback: validates the requested parameter set and
/// pushes the changes down to the camera via the Aravis device interface.
///
/// @param config  Requested parameters. Fields may be clamped to the camera's
///                limits or overridden by the dependency logic below; the
///                (possibly modified) struct is copied into global.config at
///                the end, so the caller sees the values actually applied.
/// @param level   dynamic_reconfigure change bitmask (unused here).
///
/// NOTE(review): order matters throughout — the changed* flags are computed
/// TWICE: first to detect what the user touched, then again after the
/// auto/manual override cascade may have rewritten dependent fields, so that
/// the device-write section reacts to the final values.
void RosReconfigure_callback(Config &config, uint32_t level)
{
    // One flag per reconfigurable parameter; nonzero if the value differs
    // from the currently applied global.config.
    int changedAcquire;
    int changedAcquisitionFrameRate;
    int changedExposureAuto;
    int changedGainAuto;
    int changedExposureTimeAbs;
    int changedGain;
    int changedAcquisitionMode;
    int changedTriggerMode;
    int changedTriggerSource;
    int changedSoftwarerate;
    int changedFrameid;
    int changedFocusPos;
    int changedMtu;

    std::string tf_prefix = tf::getPrefixParam(*global.phNode);
    ROS_DEBUG_STREAM("tf_prefix: " << tf_prefix);

    // Empty frame_id is not useful downstream; substitute a default.
    if (config.frame_id == "")
        config.frame_id = "camera";

    // Find what the user changed.
    changedAcquire = (global.config.Acquire != config.Acquire);
    changedAcquisitionFrameRate = (global.config.AcquisitionFrameRate != config.AcquisitionFrameRate);
    changedExposureAuto = (global.config.ExposureAuto != config.ExposureAuto);
    changedExposureTimeAbs = (global.config.ExposureTimeAbs != config.ExposureTimeAbs);
    changedGainAuto = (global.config.GainAuto != config.GainAuto);
    changedGain = (global.config.Gain != config.Gain);
    changedAcquisitionMode = (global.config.AcquisitionMode != config.AcquisitionMode);
    changedTriggerMode = (global.config.TriggerMode != config.TriggerMode);
    changedTriggerSource = (global.config.TriggerSource != config.TriggerSource);
    changedSoftwarerate = (global.config.softwaretriggerrate != config.softwaretriggerrate);
    changedFrameid = (global.config.frame_id != config.frame_id);
    changedFocusPos = (global.config.FocusPos != config.FocusPos);
    changedMtu = (global.config.mtu != config.mtu);

    // Limit params to legal values.
    // CLIP presumably clamps to [min,max] discovered from the camera — the
    // macro is defined elsewhere in this file; verify if editing.
    config.AcquisitionFrameRate = CLIP(config.AcquisitionFrameRate, global.configMin.AcquisitionFrameRate, global.configMax.AcquisitionFrameRate);
    config.ExposureTimeAbs = CLIP(config.ExposureTimeAbs, global.configMin.ExposureTimeAbs, global.configMax.ExposureTimeAbs);
    config.Gain = CLIP(config.Gain, global.configMin.Gain, global.configMax.Gain);
    config.FocusPos = CLIP(config.FocusPos, global.configMin.FocusPos, global.configMax.FocusPos);
    config.frame_id = tf::resolve(tf_prefix, config.frame_id);

    // Adjust other controls dependent on what the user changed.
    // Setting a manual value (or the opposite Auto) kicks the corresponding
    // Auto mode back to "Off"; likewise a "Once" auto-run is cancelled if any
    // unrelated parameter changes mid-flight.
    if (changedExposureTimeAbs || changedGainAuto || ((changedAcquisitionFrameRate || changedGain || changedFrameid || changedAcquisitionMode || changedTriggerSource || changedSoftwarerate) && config.ExposureAuto=="Once"))
        config.ExposureAuto = "Off";

    if (changedGain || changedExposureAuto || ((changedAcquisitionFrameRate || changedExposureTimeAbs || changedFrameid || changedAcquisitionMode || changedTriggerSource || changedSoftwarerate) && config.GainAuto=="Once"))
        config.GainAuto = "Off";

    // A fixed frame rate implies free-running acquisition, so triggering is
    // switched off.
    if (changedAcquisitionFrameRate)
        config.TriggerMode = "Off";

    // Find what changed for any reason (user edit OR the overrides above).
    changedAcquire = (global.config.Acquire != config.Acquire);
    changedAcquisitionFrameRate = (global.config.AcquisitionFrameRate != config.AcquisitionFrameRate);
    changedExposureAuto = (global.config.ExposureAuto != config.ExposureAuto);
    changedExposureTimeAbs = (global.config.ExposureTimeAbs != config.ExposureTimeAbs);
    changedGainAuto = (global.config.GainAuto != config.GainAuto);
    changedGain = (global.config.Gain != config.Gain);
    changedAcquisitionMode = (global.config.AcquisitionMode != config.AcquisitionMode);
    changedTriggerMode = (global.config.TriggerMode != config.TriggerMode);
    changedTriggerSource = (global.config.TriggerSource != config.TriggerSource);
    changedSoftwarerate = (global.config.softwaretriggerrate != config.softwaretriggerrate);
    changedFrameid = (global.config.frame_id != config.frame_id);
    changedFocusPos = (global.config.FocusPos != config.FocusPos);
    changedMtu = (global.config.mtu != config.mtu);

    // Set params into the camera.
    if (changedExposureTimeAbs)
    {
        if (global.isImplementedExposureTimeAbs)
        {
            ROS_INFO ("Set ExposureTimeAbs = %f", config.ExposureTimeAbs);
            arv_device_set_float_feature_value (global.pDevice, "ExposureTimeAbs", config.ExposureTimeAbs);
        }
        else
            ROS_INFO ("Camera does not support ExposureTimeAbs.");
    }

    if (changedGain)
    {
        if (global.isImplementedGain)
        {
            ROS_INFO ("Set gain = %f", config.Gain);
            //arv_device_set_integer_feature_value (global.pDevice, "GainRaw", config.GainRaw);
            arv_camera_set_gain (global.pCamera, config.Gain);
        }
        else
            ROS_INFO ("Camera does not support Gain or GainRaw.");
    }

    if (changedExposureAuto)
    {
        if (global.isImplementedExposureAuto && global.isImplementedExposureTimeAbs)
        {
            ROS_INFO ("Set ExposureAuto = %s", config.ExposureAuto.c_str());
            arv_device_set_string_feature_value (global.pDevice, "ExposureAuto", config.ExposureAuto.c_str());
            if (config.ExposureAuto=="Once")
            {
                // Wait for the one-shot auto algorithm, then read back the
                // resulting exposure and drop back to manual mode.
                // (2s wait assumed sufficient — TODO confirm per camera.)
                ros::Duration(2.0).sleep();
                config.ExposureTimeAbs = arv_device_get_float_feature_value (global.pDevice, "ExposureTimeAbs");
                ROS_INFO ("Get ExposureTimeAbs = %f", config.ExposureTimeAbs);
                config.ExposureAuto = "Off";
            }
        }
        else
            ROS_INFO ("Camera does not support ExposureAuto.");
    }

    if (changedGainAuto)
    {
        if (global.isImplementedGainAuto && global.isImplementedGain)
        {
            ROS_INFO ("Set GainAuto = %s", config.GainAuto.c_str());
            arv_device_set_string_feature_value (global.pDevice, "GainAuto", config.GainAuto.c_str());
            if (config.GainAuto=="Once")
            {
                // Same "Once" pattern as ExposureAuto: wait, read back the
                // auto-chosen gain, return to manual.
                ros::Duration(2.0).sleep();
                //config.GainRaw = arv_device_get_integer_feature_value (global.pDevice, "GainRaw");
                config.Gain = arv_camera_get_gain (global.pCamera);
                ROS_INFO ("Get Gain = %f", config.Gain);
                config.GainAuto = "Off";
            }
        }
        else
            ROS_INFO ("Camera does not support GainAuto.");
    }

    if (changedAcquisitionFrameRate)
    {
        // keyAcquisitionFrameRate holds the camera-specific feature name
        // (cameras differ, e.g. "AcquisitionFrameRate" vs "...Abs").
        if (global.isImplementedAcquisitionFrameRate)
        {
            ROS_INFO ("Set %s = %f", global.keyAcquisitionFrameRate, config.AcquisitionFrameRate);
            arv_device_set_float_feature_value (global.pDevice, global.keyAcquisitionFrameRate, config.AcquisitionFrameRate);
        }
        else
            ROS_INFO ("Camera does not support AcquisitionFrameRate.");
    }

    if (changedTriggerMode)
    {
        if (global.isImplementedTriggerMode)
        {
            ROS_INFO ("Set TriggerMode = %s", config.TriggerMode.c_str());
            arv_device_set_string_feature_value (global.pDevice, "TriggerMode", config.TriggerMode.c_str());
        }
        else
            ROS_INFO ("Camera does not support TriggerMode.");
    }

    if (changedTriggerSource)
    {
        if (global.isImplementedTriggerSource)
        {
            ROS_INFO ("Set TriggerSource = %s", config.TriggerSource.c_str());
            arv_device_set_string_feature_value (global.pDevice, "TriggerSource", config.TriggerSource.c_str());
        }
        else
            ROS_INFO ("Camera does not support TriggerSource.");
    }

    if ((changedTriggerMode || changedTriggerSource || changedSoftwarerate) && config.TriggerMode=="On" && config.TriggerSource=="Software")
    {
        if (global.isImplementedAcquisitionFrameRate)
        {
            // The software rate is limited by the camera's internal framerate.  Bump up the camera's internal framerate if necessary.
            config.AcquisitionFrameRate = global.configMax.AcquisitionFrameRate;
            ROS_INFO ("Set %s = %f", global.keyAcquisitionFrameRate, config.AcquisitionFrameRate);
            arv_device_set_float_feature_value (global.pDevice, global.keyAcquisitionFrameRate, config.AcquisitionFrameRate);
        }
    }

    if (changedTriggerSource || changedSoftwarerate)
    {
        // Recreate the software trigger callback.
        // Always remove any existing GLib timer first so we never run two.
        if (global.idSoftwareTriggerTimer)
        {
            g_source_remove(global.idSoftwareTriggerTimer);
            global.idSoftwareTriggerTimer = 0;
        }
        if (!strcmp(config.TriggerSource.c_str(),"Software"))
        {
            // Logged rate is the effective rate after rounding the period up
            // to whole milliseconds (g_timeout_add takes an integer period).
            ROS_INFO ("Set softwaretriggerrate = %f", 1000.0/ceil(1000.0 / config.softwaretriggerrate));

            // Turn on software timer callback.
            global.idSoftwareTriggerTimer = g_timeout_add ((guint)ceil(1000.0 / config.softwaretriggerrate), SoftwareTrigger_callback, global.pCamera);
        }
    }

    if (changedFocusPos)
    {
        if (global.isImplementedFocusPos)
        {
            ROS_INFO ("Set FocusPos = %d", config.FocusPos);
            arv_device_set_integer_feature_value(global.pDevice, "FocusPos", config.FocusPos);
            // Read back the value the camera actually accepted.
            // (1s wait assumed sufficient for the focus move — TODO confirm.)
            ros::Duration(1.0).sleep();
            config.FocusPos = arv_device_get_integer_feature_value(global.pDevice, "FocusPos");
            ROS_INFO ("Get FocusPos = %d", config.FocusPos);
        }
        else
            ROS_INFO ("Camera does not support FocusPos.");
    }

    if (changedMtu)
    {
        if (global.isImplementedMtu)
        {
            ROS_INFO ("Set mtu = %d", config.mtu);
            arv_device_set_integer_feature_value(global.pDevice, "GevSCPSPacketSize", config.mtu);
            // Read back: the camera may clamp the packet size it accepts.
            ros::Duration(1.0).sleep();
            config.mtu = arv_device_get_integer_feature_value(global.pDevice, "GevSCPSPacketSize");
            ROS_INFO ("Get mtu = %d", config.mtu);
        }
        else
            ROS_INFO ("Camera does not support mtu (i.e. GevSCPSPacketSize).");
    }

    if (changedAcquisitionMode)
    {
        if (global.isImplementedAcquisitionMode)
        {
            ROS_INFO ("Set AcquisitionMode = %s", config.AcquisitionMode.c_str());
            arv_device_set_string_feature_value (global.pDevice, "AcquisitionMode", config.AcquisitionMode.c_str());

            // Restart acquisition so the new mode takes effect.
            ROS_INFO("AcquisitionStop");
            arv_device_execute_command (global.pDevice, "AcquisitionStop");
            ROS_INFO("AcquisitionStart");
            arv_device_execute_command (global.pDevice, "AcquisitionStart");
        }
        else
            ROS_INFO ("Camera does not support AcquisitionMode.");
    }

    if (changedAcquire)
    {
        if (config.Acquire)
        {
            ROS_INFO("AcquisitionStart");
            arv_device_execute_command (global.pDevice, "AcquisitionStart");
        }
        else
        {
            ROS_INFO("AcquisitionStop");
            arv_device_execute_command (global.pDevice, "AcquisitionStop");
        }
    }

    // Publish the applied values as the new baseline for the next callback.
    global.config = config;

} // RosReconfigure_callback()