static unicap_status_t aravis_enumerate_devices (unicap_device_t *device, int index)
{
   unicap_status_t status = STATUS_NO_MATCH;

   /* Refresh the Aravis device list only on the first enumeration call. */
   if (index == 0)
      arv_update_device_list ();

   unicap_void_device (device);

   if (index < (int) arv_get_n_devices ()){
      ArvCamera *camera;

      strncpy (device->identifier, arv_get_device_id (index), sizeof (device->identifier) - 1);
      camera = arv_camera_new (device->identifier);
      /* The camera may have vanished between enumeration and open; only fill in
         the description if it could actually be opened. */
      if (camera){
         if (arv_camera_get_model_name (camera))
            strncpy (device->model_name, arv_camera_get_model_name (camera), sizeof (device->model_name) - 1);
         if (arv_camera_get_vendor_name (camera))
            strncpy (device->vendor_name, arv_camera_get_vendor_name (camera), sizeof (device->vendor_name) - 1);
         strcpy (device->cpi_layer, "aravis_cpi");
         status = STATUS_SUCCESS;

         g_object_unref (camera);
      }
   }

   return status;
}
bool CvCaptureCAM_Aravis::getDeviceNameById(int id, std::string &device)
{
    arv_update_device_list();

    if((id >= 0) && (id < (int)arv_get_n_devices())) {
        device = arv_get_device_id(id);
        return true;
    }
    return false;
}
bool CvCaptureCAM_Aravis::getDeviceNameById(int id, std::string &device)
{
    arv_update_device_list();

    int n_devices = arv_get_n_devices();
    for(int i = 0; i < n_devices; i++) {
        if(id == i) {
            device = arv_get_device_id(i);
            return true;
        }
    }
    return false;
}
bool CameraGigeAravis::getDeviceNameById(int id, string &device)
{
    arv_update_device_list();

    int n_devices = arv_get_n_devices();
    for(int i = 0; i < n_devices; i++) {
        if(id == i) {
            device = arv_get_device_id(i);
            return true;
        }
    }

    BOOST_LOG_SEV(logger, fail) << "Failed to retrieve camera with this ID.";
    return false;
}
int CCameraList::enumerate_cameras_aravis()
{
    int camera_cnt = 0;

#ifdef USE_ARAVIS_GIGE
    unsigned int n_devices;
    unsigned int i;

    printf("Enumerating GigE cameras\n");
    arv_update_device_list ();
    n_devices = arv_get_n_devices ();
    for (i = 0; i < n_devices; i++) {
        printf("GigE : %s\n", arv_get_device_id(i));
        CGigECamera *cam = new CGigECamera( arv_get_device_id(i) );
        _Cameras.push_back(cam);
        camera_cnt++;
    }
#endif

    return camera_cnt;
}
int main(int argc, char** argv)
{
    char        *pszGuid = NULL;
    char         szGuid[512];
    int          nInterfaces = 0;
    int          nDevices = 0;
    int          i = 0;
    const char  *pkeyAcquisitionFrameRate[2] = {"AcquisitionFrameRate", "AcquisitionFrameRateAbs"};
    ArvGcNode   *pGcNode;
    GError      *error = NULL;

    global.bCancel = FALSE;
    global.config = global.config.__getDefault__();
    global.idSoftwareTriggerTimer = 0;

    ros::init(argc, argv, "camera");
    global.phNode = new ros::NodeHandle();

    // Service callback for firing NUCs.
    // Needed since we cannot open another connection to cameras while streaming.
    ros::NodeHandle nh;
    ros::ServiceServer NUCservice = nh.advertiseService("FireNUC", NUCService_callback);

    //g_type_init ();

    // Print out some useful info.
    ROS_INFO ("Attached cameras:");
    arv_update_device_list();
    nInterfaces = arv_get_n_interfaces();
    ROS_INFO ("# Interfaces: %d", nInterfaces);

    nDevices = arv_get_n_devices();
    ROS_INFO ("# Devices: %d", nDevices);
    for (i = 0; i < nDevices; i++)
        ROS_INFO ("Device%d: %s", i, arv_get_device_id(i));

    if (nDevices > 0)
    {
        // Get the camera guid from either the command-line or as a parameter.
        if (argc == 2)
        {
            strcpy(szGuid, argv[1]);
            pszGuid = szGuid;
        }
        else
        {
            if (global.phNode->hasParam(ros::this_node::getName()+"/guid"))
            {
                std::string stGuid;

                global.phNode->getParam(ros::this_node::getName()+"/guid", stGuid);
                strcpy (szGuid, stGuid.c_str());
                pszGuid = szGuid;
            }
            else
                pszGuid = NULL;
        }

        // Open the camera, and set it up.
        ROS_INFO("Opening: %s", pszGuid ? pszGuid : "(any)");

        while (TRUE)
        {
            global.pCamera = arv_camera_new(pszGuid);
            if (global.pCamera)
                break;
            else
            {
                ROS_WARN ("Could not open camera %s. Retrying...", pszGuid);
                ros::Duration(1.0).sleep();
                ros::spinOnce();
            }
        }

        global.pDevice = arv_camera_get_device(global.pCamera);
        ROS_INFO("Opened: %s-%s",
                 arv_device_get_string_feature_value (global.pDevice, "DeviceVendorName"),
                 arv_device_get_string_feature_value (global.pDevice, "DeviceID"));

        // See if some basic camera features exist.
        pGcNode = arv_device_get_feature (global.pDevice, "AcquisitionMode");
        global.isImplementedAcquisitionMode = ARV_GC_FEATURE_NODE (pGcNode) ? arv_gc_feature_node_is_implemented (ARV_GC_FEATURE_NODE (pGcNode), &error) : FALSE;

        pGcNode = arv_device_get_feature (global.pDevice, "GainRaw");
        global.isImplementedGain = ARV_GC_FEATURE_NODE (pGcNode) ? arv_gc_feature_node_is_implemented (ARV_GC_FEATURE_NODE (pGcNode), &error) : FALSE;
        pGcNode = arv_device_get_feature (global.pDevice, "Gain");
        global.isImplementedGain |= ARV_GC_FEATURE_NODE (pGcNode) ? arv_gc_feature_node_is_implemented (ARV_GC_FEATURE_NODE (pGcNode), &error) : FALSE;

        pGcNode = arv_device_get_feature (global.pDevice, "ExposureTimeAbs");
        global.isImplementedExposureTimeAbs = ARV_GC_FEATURE_NODE (pGcNode) ? arv_gc_feature_node_is_implemented (ARV_GC_FEATURE_NODE (pGcNode), &error) : FALSE;

        pGcNode = arv_device_get_feature (global.pDevice, "ExposureAuto");
        global.isImplementedExposureAuto = ARV_GC_FEATURE_NODE (pGcNode) ? arv_gc_feature_node_is_implemented (ARV_GC_FEATURE_NODE (pGcNode), &error) : FALSE;

        pGcNode = arv_device_get_feature (global.pDevice, "GainAuto");
        global.isImplementedGainAuto = ARV_GC_FEATURE_NODE (pGcNode) ? arv_gc_feature_node_is_implemented (ARV_GC_FEATURE_NODE (pGcNode), &error) : FALSE;

        pGcNode = arv_device_get_feature (global.pDevice, "TriggerSelector");
        global.isImplementedTriggerSelector = ARV_GC_FEATURE_NODE (pGcNode) ? arv_gc_feature_node_is_implemented (ARV_GC_FEATURE_NODE (pGcNode), &error) : FALSE;

        pGcNode = arv_device_get_feature (global.pDevice, "TriggerSource");
        global.isImplementedTriggerSource = ARV_GC_FEATURE_NODE (pGcNode) ? arv_gc_feature_node_is_implemented (ARV_GC_FEATURE_NODE (pGcNode), &error) : FALSE;

        pGcNode = arv_device_get_feature (global.pDevice, "TriggerMode");
        global.isImplementedTriggerMode = ARV_GC_FEATURE_NODE (pGcNode) ? arv_gc_feature_node_is_implemented (ARV_GC_FEATURE_NODE (pGcNode), &error) : FALSE;

        pGcNode = arv_device_get_feature (global.pDevice, "FocusPos");
        global.isImplementedFocusPos = ARV_GC_FEATURE_NODE (pGcNode) ? arv_gc_feature_node_is_implemented (ARV_GC_FEATURE_NODE (pGcNode), &error) : FALSE;

        pGcNode = arv_device_get_feature (global.pDevice, "GevSCPSPacketSize");
        global.isImplementedMtu = ARV_GC_FEATURE_NODE (pGcNode) ? arv_gc_feature_node_is_implemented (ARV_GC_FEATURE_NODE (pGcNode), &error) : FALSE;

        pGcNode = arv_device_get_feature (global.pDevice, "AcquisitionFrameRateEnable");
        global.isImplementedAcquisitionFrameRateEnable = ARV_GC_FEATURE_NODE (pGcNode) ? arv_gc_feature_node_is_implemented (ARV_GC_FEATURE_NODE (pGcNode), &error) : FALSE;

        // Find the key name for framerate.
        global.keyAcquisitionFrameRate = NULL;
        for (i = 0; i < 2; i++)
        {
            pGcNode = arv_device_get_feature (global.pDevice, pkeyAcquisitionFrameRate[i]);
            global.isImplementedAcquisitionFrameRate = pGcNode ? arv_gc_feature_node_is_implemented (ARV_GC_FEATURE_NODE (pGcNode), &error) : FALSE;
            if (global.isImplementedAcquisitionFrameRate)
            {
                global.keyAcquisitionFrameRate = pkeyAcquisitionFrameRate[i];
                break;
            }
        }

        // Get parameter bounds.
        arv_camera_get_exposure_time_bounds (global.pCamera, &global.configMin.ExposureTimeAbs, &global.configMax.ExposureTimeAbs);
        arv_camera_get_gain_bounds          (global.pCamera, &global.configMin.Gain, &global.configMax.Gain);
        arv_camera_get_sensor_size          (global.pCamera, &global.widthSensor, &global.heightSensor);
        arv_camera_get_width_bounds         (global.pCamera, &global.widthRoiMin, &global.widthRoiMax);
        arv_camera_get_height_bounds        (global.pCamera, &global.heightRoiMin, &global.heightRoiMax);

        if (global.isImplementedFocusPos)
        {
            gint64 focusMin64, focusMax64;

            arv_device_get_integer_feature_bounds (global.pDevice, "FocusPos", &focusMin64, &focusMax64);
            global.configMin.FocusPos = focusMin64;
            global.configMax.FocusPos = focusMax64;
        }
        else
        {
            global.configMin.FocusPos = 0;
            global.configMax.FocusPos = 0;
        }

        global.configMin.AcquisitionFrameRate =    0.0;
        global.configMax.AcquisitionFrameRate = 1000.0;

        // Initial camera settings.
        if (global.isImplementedExposureTimeAbs)
            arv_device_set_float_feature_value(global.pDevice, "ExposureTimeAbs", global.config.ExposureTimeAbs);
        if (global.isImplementedGain)
            arv_camera_set_gain(global.pCamera, global.config.Gain);
            //arv_device_set_integer_feature_value(global.pDevice, "GainRaw", global.config.GainRaw);
        if (global.isImplementedAcquisitionFrameRateEnable)
            arv_device_set_integer_feature_value(global.pDevice, "AcquisitionFrameRateEnable", 1);
        if (global.isImplementedAcquisitionFrameRate)
            arv_device_set_float_feature_value(global.pDevice, global.keyAcquisitionFrameRate, global.config.AcquisitionFrameRate);

        // Set up the triggering.
        if (global.isImplementedTriggerMode)
        {
            if (global.isImplementedTriggerSelector && global.isImplementedTriggerMode)
            {
                arv_device_set_string_feature_value(global.pDevice, "TriggerSelector", "AcquisitionStart");
                arv_device_set_string_feature_value(global.pDevice, "TriggerMode", "Off");
                arv_device_set_string_feature_value(global.pDevice, "TriggerSelector", "FrameStart");
                arv_device_set_string_feature_value(global.pDevice, "TriggerMode", "Off");
            }
        }

        WriteCameraFeaturesFromRosparam ();

#ifdef TUNING
        ros::Publisher pubInt64 = global.phNode->advertise<std_msgs::Int64>(ros::this_node::getName()+"/dt", 100);
        global.ppubInt64 = &pubInt64;
#endif

        // Grab the calibration file url from the param server if it exists.
        std::string calibrationURL = ""; // Default looks in .ros/camera_info
        if (!(ros::param::get(std::string("calibrationURL").append(arv_device_get_string_feature_value (global.pDevice, "DeviceID")), calibrationURL)))
        {
            ROS_ERROR("ERROR: Could not read calibrationURL from parameter server");
        }

        // Start the camerainfo manager.
        global.pCameraInfoManager = new camera_info_manager::CameraInfoManager(ros::NodeHandle(ros::this_node::getName()), arv_device_get_string_feature_value (global.pDevice, "DeviceID"), calibrationURL);

        // Start the dynamic_reconfigure server.
        dynamic_reconfigure::Server<Config>               reconfigureServer;
        dynamic_reconfigure::Server<Config>::CallbackType reconfigureCallback;

        reconfigureCallback = boost::bind(&RosReconfigure_callback, _1, _2);
        reconfigureServer.setCallback(reconfigureCallback);
        ros::Duration(2.0).sleep();

        // Get parameter current values.
        global.xRoi = 0;
        global.yRoi = 0;
        global.widthRoi = 0;
        global.heightRoi = 0;
        arv_camera_get_region (global.pCamera, &global.xRoi, &global.yRoi, &global.widthRoi, &global.heightRoi);

        global.config.ExposureTimeAbs = global.isImplementedExposureTimeAbs ? arv_device_get_float_feature_value (global.pDevice, "ExposureTimeAbs") : 0;
        global.config.Gain            = global.isImplementedGain ? arv_camera_get_gain (global.pCamera) : 0.0;
        global.pszPixelformat         = g_string_ascii_down(g_string_new(arv_device_get_string_feature_value(global.pDevice, "PixelFormat")))->str;
        global.nBytesPixel            = ARV_PIXEL_FORMAT_BYTE_PER_PIXEL(arv_device_get_integer_feature_value(global.pDevice, "PixelFormat"));
        global.config.FocusPos        = global.isImplementedFocusPos ? arv_device_get_integer_feature_value (global.pDevice, "FocusPos") : 0;

        // Print information.
        ROS_INFO ("    Using Camera Configuration:");
        ROS_INFO ("    ---------------------------");
        ROS_INFO ("    Vendor name          = %s", arv_device_get_string_feature_value (global.pDevice, "DeviceVendorName"));
        ROS_INFO ("    Model name           = %s", arv_device_get_string_feature_value (global.pDevice, "DeviceModelName"));
        ROS_INFO ("    Device id            = %s", arv_device_get_string_feature_value (global.pDevice, "DeviceID"));
        ROS_INFO ("    Sensor width         = %d", global.widthSensor);
        ROS_INFO ("    Sensor height        = %d", global.heightSensor);
        ROS_INFO ("    ROI x,y,w,h          = %d, %d, %d, %d", global.xRoi, global.yRoi, global.widthRoi, global.heightRoi);
        ROS_INFO ("    Pixel format         = %s", global.pszPixelformat);
        ROS_INFO ("    BytesPerPixel        = %d", global.nBytesPixel);
        ROS_INFO ("    Acquisition Mode     = %s", global.isImplementedAcquisitionMode ? arv_device_get_string_feature_value (global.pDevice, "AcquisitionMode") : "(not implemented in camera)");
        ROS_INFO ("    Trigger Mode         = %s", global.isImplementedTriggerMode ? arv_device_get_string_feature_value (global.pDevice, "TriggerMode") : "(not implemented in camera)");
        ROS_INFO ("    Trigger Source       = %s", global.isImplementedTriggerSource ? arv_device_get_string_feature_value (global.pDevice, "TriggerSource") : "(not implemented in camera)");
        ROS_INFO ("    Can set FrameRate:     %s", global.isImplementedAcquisitionFrameRate ? "True" : "False");
        if (global.isImplementedAcquisitionFrameRate)
        {
            global.config.AcquisitionFrameRate = arv_device_get_float_feature_value (global.pDevice, global.keyAcquisitionFrameRate);
            ROS_INFO ("    AcquisitionFrameRate = %g hz", global.config.AcquisitionFrameRate);
        }

        ROS_INFO ("    Can set Exposure:      %s", global.isImplementedExposureTimeAbs ? "True" : "False");
        if (global.isImplementedExposureTimeAbs)
        {
            ROS_INFO ("    Can set ExposureAuto:  %s", global.isImplementedExposureAuto ? "True" : "False");
            ROS_INFO ("    Exposure             = %g us in range [%g,%g]", global.config.ExposureTimeAbs, global.configMin.ExposureTimeAbs, global.configMax.ExposureTimeAbs);
        }

        ROS_INFO ("    Can set Gain:          %s", global.isImplementedGain ? "True" : "False");
        if (global.isImplementedGain)
        {
            ROS_INFO ("    Can set GainAuto:      %s", global.isImplementedGainAuto ? "True" : "False");
            ROS_INFO ("    Gain                 = %f %% in range [%f,%f]", global.config.Gain, global.configMin.Gain, global.configMax.Gain);
        }

        ROS_INFO ("    Can set FocusPos:      %s", global.isImplementedFocusPos ? "True" : "False");

        if (global.isImplementedMtu)
            ROS_INFO ("    Network mtu          = %lu", arv_device_get_integer_feature_value(global.pDevice, "GevSCPSPacketSize"));

        ROS_INFO ("    ---------------------------");

        //
        // Print the tree of camera features, with their values.
        // ROS_INFO ("    ----------------------------------------------------------------------------------");
        // NODEEX  nodeex;
        // ArvGc  *pGenicam = 0;
        // pGenicam = arv_device_get_genicam(global.pDevice);
        //
        // nodeex.szName = "Root";
        // nodeex.pNode = (ArvDomNode *)arv_gc_get_node(pGenicam, nodeex.szName);
        // nodeex.pNodeSibling = NULL;
        // PrintDOMTree(pGenicam, nodeex, 0);
        // ROS_INFO ("    ----------------------------------------------------------------------------------");

        ArvGvStream *pStream = NULL;
        while (TRUE)
        {
            pStream = CreateStream();
            if (pStream)
                break;
            else
            {
                ROS_WARN("Could not create image stream for %s. Retrying...", pszGuid);
                ros::Duration(1.0).sleep();
                ros::spinOnce();
            }
        }

        ApplicationData applicationdata;
        applicationdata.nBuffers = 0;
        applicationdata.main_loop = 0;

        // Set up image_raw.
        image_transport::ImageTransport *pTransport = new image_transport::ImageTransport(*global.phNode);
        global.publisher = pTransport->advertiseCamera(ros::this_node::getName()+"/image_raw", 1);

        // Connect signals with callbacks.
        g_signal_connect (pStream,        "new-buffer",   G_CALLBACK (NewBuffer_callback),   &applicationdata);
        g_signal_connect (global.pDevice, "control-lost", G_CALLBACK (ControlLost_callback), NULL);
        g_timeout_add_seconds (1, PeriodicTask_callback, &applicationdata);
        arv_stream_set_emit_signals ((ArvStream *)pStream, TRUE);

        void (*pSigintHandlerOld)(int);
        pSigintHandlerOld = signal (SIGINT, set_cancel);

        arv_device_execute_command (global.pDevice, "AcquisitionStart");

        applicationdata.main_loop = g_main_loop_new (NULL, FALSE);
        g_main_loop_run (applicationdata.main_loop);

        if (global.idSoftwareTriggerTimer)
        {
            g_source_remove(global.idSoftwareTriggerTimer);
            global.idSoftwareTriggerTimer = 0;
        }

        signal (SIGINT, pSigintHandlerOld);

        g_main_loop_unref (applicationdata.main_loop);

        guint64 n_completed_buffers;
        guint64 n_failures;
        guint64 n_underruns;
        guint64 n_resent;
        guint64 n_missing;

        arv_stream_get_statistics ((ArvStream *)pStream, &n_completed_buffers, &n_failures, &n_underruns);
        ROS_INFO ("Completed buffers = %llu", (unsigned long long) n_completed_buffers);
        ROS_INFO ("Failures          = %llu", (unsigned long long) n_failures);
        ROS_INFO ("Underruns         = %llu", (unsigned long long) n_underruns);
        arv_gv_stream_get_statistics (pStream, &n_resent, &n_missing);
        ROS_INFO ("Resent buffers    = %llu", (unsigned long long) n_resent);
        ROS_INFO ("Missing           = %llu", (unsigned long long) n_missing);

        arv_device_execute_command (global.pDevice, "AcquisitionStop");

        g_object_unref (pStream);
    }
    else
        ROS_ERROR ("No cameras detected.");

    delete global.phNode;

    return 0;
} // main()