guint16
arv_gv_stream_get_port (ArvGvStream *gv_stream)
{
    GInetSocketAddress *local_address;
    guint16 port;

    g_return_val_if_fail (ARV_IS_GV_STREAM (gv_stream), 0);

    local_address = G_INET_SOCKET_ADDRESS (g_socket_get_local_address (gv_stream->socket, NULL));
    port = g_inet_socket_address_get_port (local_address);
    g_object_unref (local_address);

    return port;
}
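A minimal usage sketch for the getter above, assuming a camera created elsewhere with arv_camera_new(); logging the port (for instance to open a firewall rule for the GVSP traffic) is illustrative, not part of the Aravis API:

/* Sketch: query the local UDP port a newly created GV stream is bound to. */
ArvStream *stream = arv_camera_create_stream (camera, NULL, NULL);

if (ARV_IS_GV_STREAM (stream)) {
    guint16 port = arv_gv_stream_get_port (ARV_GV_STREAM (stream));

    g_print ("GVSP stream bound to local port %u\n", port);
}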
void
arv_gv_stream_get_statistics (ArvGvStream *gv_stream,
                              guint64 *n_resent_packets,
                              guint64 *n_missing_packets)
{
    ArvGvStreamThreadData *thread_data;

    g_return_if_fail (ARV_IS_GV_STREAM (gv_stream));

    thread_data = gv_stream->thread_data;

    if (n_resent_packets != NULL)
        *n_resent_packets = thread_data->n_resent_packets;
    if (n_missing_packets != NULL)
        *n_missing_packets = thread_data->n_missing_packets;
}
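A hedged sketch of reading these counters after an acquisition run, to judge link quality; stream is assumed to be the ArvGvStream used for grabbing. Since both out parameters are NULL-checked in the implementation above, either one may be omitted:

/* Sketch: report GigE Vision resend/missing packet counters. */
guint64 n_resent = 0;
guint64 n_missing = 0;

arv_gv_stream_get_statistics (ARV_GV_STREAM (stream), &n_resent, &n_missing);
g_print ("Resent packets  = %" G_GUINT64_FORMAT "\n", n_resent);
g_print ("Missing packets = %" G_GUINT64_FORMAT "\n", n_missing);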
ArvGvStream *CreateStream(void)
{
    gboolean     bAutoBuffer = FALSE;
    gboolean     bPacketResend = TRUE;
    unsigned int timeoutPacket = 40;          // milliseconds
    unsigned int timeoutFrameRetention = 200; // milliseconds

    ArvGvStream *pStream = (ArvGvStream *)arv_device_create_stream (global.pDevice, NULL, NULL);

    if (pStream)
    {
        ArvBuffer *pBuffer;
        gint       nbytesPayload;

        if (!ARV_IS_GV_STREAM (pStream))
            ROS_WARN("Stream is not a GV_STREAM");

        if (bAutoBuffer)
            g_object_set (pStream,
                          "socket-buffer", ARV_GV_STREAM_SOCKET_BUFFER_AUTO,
                          "socket-buffer-size", 0,
                          NULL);

        // Packet resend is enabled by default; only override when it has
        // been turned off.
        if (!bPacketResend)
            g_object_set (pStream,
                          "packet-resend", ARV_GV_STREAM_PACKET_RESEND_NEVER,
                          NULL);

        // Both stream properties are expressed in µs, hence the * 1000.
        g_object_set (pStream,
                      "packet-timeout", (unsigned) timeoutPacket * 1000,
                      "frame-retention", (unsigned) timeoutFrameRetention * 1000,
                      NULL);

        // Load up some buffers.
        nbytesPayload = arv_camera_get_payload (global.pCamera);
        for (int i = 0; i < 50; i++)
        {
            pBuffer = arv_buffer_new (nbytesPayload, NULL);
            arv_stream_push_buffer ((ArvStream *)pStream, pBuffer);
        }
    }

    return pStream;
} // CreateStream()
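How the returned stream would typically be consumed; a sketch only, with global.pCamera taken from the snippet above and the image processing step left out:

/* Sketch: create the stream, grab one frame, recycle the buffer. */
ArvGvStream *pStream = CreateStream ();

if (pStream)
{
    arv_camera_start_acquisition (global.pCamera);

    /* Wait up to 1 s (timeout is in µs) for a filled buffer. */
    ArvBuffer *pBuffer = arv_stream_timeout_pop_buffer ((ArvStream *)pStream, 1000000);
    if (pBuffer)
    {
        /* ... process the image data here ... */
        arv_stream_push_buffer ((ArvStream *)pStream, pBuffer);
    }

    arv_camera_stop_acquisition (global.pCamera);
}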
#include <arv.h>
#include <signal.h>
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>

int
main (int argc, char *argv[])
{
    ArvDevice *device;
    ArvStream *stream;
    ArvCamera *camera;
    ArvGcFeatureNode *feature;
    guint64 n_completed_buffers;
    guint64 n_failures;
    guint64 n_underruns;
    GOptionContext *context;
    GError *error = NULL;
    void (*old_sigint_handler)(int);
    int i, payload;

    arv_g_thread_init (NULL);
    arv_g_type_init ();

    context = g_option_context_new (NULL);
    g_option_context_set_summary (context, "Test of heartbeat robustness while continuously changing a feature.");
    g_option_context_add_main_entries (context, arv_option_entries, NULL);

    if (!g_option_context_parse (context, &argc, &argv, &error)) {
        g_option_context_free (context);
        g_print ("Option parsing failed: %s\n", error->message);
        g_error_free (error);
        return EXIT_FAILURE;
    }

    g_option_context_free (context);

    arv_debug_enable (arv_option_debug_domains);

    camera = arv_camera_new (arv_option_camera_name);
    if (!ARV_IS_CAMERA (camera)) {
        printf ("Device not found\n");
        return EXIT_FAILURE;
    }

    device = arv_camera_get_device (camera);

    stream = arv_camera_create_stream (camera, NULL, NULL);
    if (!ARV_IS_STREAM (stream)) {
        printf ("Invalid device\n");
    } else {
        payload = arv_camera_get_payload (camera);

        if (ARV_IS_GV_STREAM (stream)) {
            g_object_set (stream,
                          /* "socket-buffer", ARV_GV_STREAM_SOCKET_BUFFER_AUTO, */
                          "socket-buffer", ARV_GV_STREAM_SOCKET_BUFFER_FIXED,
                          "socket-buffer-size", payload * 6,
                          "packet-timeout", 1000 * 1000,
                          "frame-retention", 100 * 1000,
                          "packet-resend", ARV_GV_STREAM_PACKET_RESEND_ALWAYS,
                          NULL);
        }

        for (i = 0; i < 100; i++)
            arv_stream_push_buffer (stream, arv_buffer_new (payload, NULL));

        arv_camera_set_acquisition_mode (camera, ARV_ACQUISITION_MODE_CONTINUOUS);

        feature = ARV_GC_FEATURE_NODE (arv_device_get_feature (device, arv_option_feature_name));

        arv_camera_start_acquisition (camera);

        old_sigint_handler = signal (SIGINT, set_cancel);

        while (!cancel) {
            ArvBuffer *buffer = arv_stream_timeout_pop_buffer (stream, 2000000);

            if (buffer) {
                usleep (10);
                arv_stream_push_buffer (stream, buffer);
            }

            if (!(++i % 5)) {
                char *value;

                if ((i / 100) % 2 == 0)
                    value = g_strdup_printf ("%d", arv_option_min);
                else
                    value = g_strdup_printf ("%d", arv_option_max);

                fprintf (stderr, "Setting %s from %s to %s\n",
                         arv_option_feature_name,
                         arv_gc_feature_node_get_value_as_string (feature, NULL),
                         value);
                arv_gc_feature_node_set_value_from_string (feature, value, NULL);
                g_free (value);
            }
        }

        signal (SIGINT, old_sigint_handler);

        arv_stream_get_statistics (stream, &n_completed_buffers, &n_failures, &n_underruns);

        printf ("\nCompleted buffers = %llu\n", (unsigned long long) n_completed_buffers);
        printf ("Failures          = %llu\n", (unsigned long long) n_failures);
        printf ("Underruns         = %llu\n", (unsigned long long) n_underruns);

        arv_camera_stop_acquisition (camera);
    }

    g_object_unref (camera);

    return 0;
}
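The test above references a handful of globals that are not part of the excerpt. A minimal sketch of what they could look like, declared before main(); the option table and default values are assumptions, not the real test's definitions:

/* Sketch: globals assumed by the heartbeat test above. */
static volatile gboolean cancel = FALSE;

static void
set_cancel (int signum)
{
    cancel = TRUE;
}

static char *arv_option_camera_name = NULL;
static char *arv_option_feature_name = "Width";
static int arv_option_min = 128;
static int arv_option_max = 512;
static char *arv_option_debug_domains = NULL;

static const GOptionEntry arv_option_entries[] = {
    { "name",    'n', 0, G_OPTION_ARG_STRING, &arv_option_camera_name,   "Camera name",       NULL },
    { "feature", 'f', 0, G_OPTION_ARG_STRING, &arv_option_feature_name,  "Feature to toggle", NULL },
    { "min",     'm', 0, G_OPTION_ARG_INT,    &arv_option_min,           "Feature minimum",   NULL },
    { "max",     'M', 0, G_OPTION_ARG_INT,    &arv_option_max,           "Feature maximum",   NULL },
    { "debug",   'd', 0, G_OPTION_ARG_STRING, &arv_option_debug_domains, "Debug domains",     NULL },
    { NULL }
};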
static gboolean
gst_aravis_set_caps (GstBaseSrc *src, GstCaps *caps)
{
    GstAravis* gst_aravis = GST_ARAVIS (src);
    GstStructure *structure;
    ArvPixelFormat pixel_format;
    int height, width;
    int bpp, depth;
    const GValue *frame_rate;
    const char *caps_string;
    unsigned int i;
    guint32 fourcc;

    GST_LOG_OBJECT (gst_aravis, "Requested caps = %" GST_PTR_FORMAT, caps);

    arv_camera_stop_acquisition (gst_aravis->camera);

    if (gst_aravis->stream != NULL)
        g_object_unref (gst_aravis->stream);

    structure = gst_caps_get_structure (caps, 0);

    gst_structure_get_int (structure, "width", &width);
    gst_structure_get_int (structure, "height", &height);
    frame_rate = gst_structure_get_value (structure, "framerate");
    gst_structure_get_int (structure, "bpp", &bpp);
    gst_structure_get_int (structure, "depth", &depth);

    if (gst_structure_get_field_type (structure, "format") == G_TYPE_STRING) {
        const char *string;

        string = gst_structure_get_string (structure, "format");
        fourcc = GST_STR_FOURCC (string);
    } else if (gst_structure_get_field_type (structure, "format") == GST_TYPE_FOURCC) {
        gst_structure_get_fourcc (structure, "format", &fourcc);
    } else
        fourcc = 0;

    pixel_format = arv_pixel_format_from_gst_0_10_caps (gst_structure_get_name (structure), bpp, depth, fourcc);

    arv_camera_set_region (gst_aravis->camera, gst_aravis->offset_x, gst_aravis->offset_y, width, height);
    arv_camera_set_binning (gst_aravis->camera, gst_aravis->h_binning, gst_aravis->v_binning);
    arv_camera_set_pixel_format (gst_aravis->camera, pixel_format);

    if (frame_rate != NULL) {
        double dbl_frame_rate;

        dbl_frame_rate = (double) gst_value_get_fraction_numerator (frame_rate) /
            (double) gst_value_get_fraction_denominator (frame_rate);

        GST_DEBUG_OBJECT (gst_aravis, "Frame rate = %g Hz", dbl_frame_rate);
        arv_camera_set_frame_rate (gst_aravis->camera, dbl_frame_rate);

        if (dbl_frame_rate > 0.0)
            gst_aravis->buffer_timeout_us = MAX (GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT,
                                                 3e6 / dbl_frame_rate);
        else
            gst_aravis->buffer_timeout_us = GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT;
    } else
        gst_aravis->buffer_timeout_us = GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT;

    GST_DEBUG_OBJECT (gst_aravis, "Buffer timeout = %" G_GUINT64_FORMAT " µs", gst_aravis->buffer_timeout_us);

    GST_DEBUG_OBJECT (gst_aravis, "Actual frame rate = %g Hz", arv_camera_get_frame_rate (gst_aravis->camera));

    if (gst_aravis->gain_auto) {
        arv_camera_set_gain_auto (gst_aravis->camera, ARV_AUTO_CONTINUOUS);
        GST_DEBUG_OBJECT (gst_aravis, "Auto Gain = continuous");
    } else {
        if (gst_aravis->gain >= 0) {
            GST_DEBUG_OBJECT (gst_aravis, "Gain = %g", gst_aravis->gain);
            arv_camera_set_gain_auto (gst_aravis->camera, ARV_AUTO_OFF);
            arv_camera_set_gain (gst_aravis->camera, gst_aravis->gain);
        }
        GST_DEBUG_OBJECT (gst_aravis, "Actual gain = %g", arv_camera_get_gain (gst_aravis->camera));
    }

    if (gst_aravis->exposure_auto) {
        arv_camera_set_exposure_time_auto (gst_aravis->camera, ARV_AUTO_CONTINUOUS);
        GST_DEBUG_OBJECT (gst_aravis, "Auto Exposure = continuous");
    } else {
        if (gst_aravis->exposure_time_us > 0.0) {
            GST_DEBUG_OBJECT (gst_aravis, "Exposure = %g µs", gst_aravis->exposure_time_us);
            arv_camera_set_exposure_time_auto (gst_aravis->camera, ARV_AUTO_OFF);
            arv_camera_set_exposure_time (gst_aravis->camera, gst_aravis->exposure_time_us);
        }
        GST_DEBUG_OBJECT (gst_aravis, "Actual exposure = %g µs", arv_camera_get_exposure_time (gst_aravis->camera));
    }

    if (gst_aravis->fixed_caps != NULL)
        gst_caps_unref (gst_aravis->fixed_caps);

    caps_string = arv_pixel_format_to_gst_0_10_caps_string (pixel_format);
    if (caps_string != NULL) {
        GstStructure *structure;
        GstCaps *caps;

        caps = gst_caps_new_empty ();
        structure = gst_structure_from_string (caps_string, NULL);
        gst_structure_set (structure,
                           "width", G_TYPE_INT, width,
                           "height", G_TYPE_INT, height,
                           NULL);

        if (frame_rate != NULL)
            gst_structure_set_value (structure, "framerate", frame_rate);

        gst_caps_append_structure (caps, structure);

        gst_aravis->fixed_caps = caps;
    } else
        gst_aravis->fixed_caps = NULL;

    gst_aravis->payload = arv_camera_get_payload (gst_aravis->camera);
    gst_aravis->stream = arv_camera_create_stream (gst_aravis->camera, NULL, NULL);

    /* "packet-resend" only exists on GV streams; setting it on another
     * stream type would trigger a GObject warning. */
    if (ARV_IS_GV_STREAM (gst_aravis->stream)) {
        if (gst_aravis->packet_resend)
            g_object_set (gst_aravis->stream, "packet-resend", ARV_GV_STREAM_PACKET_RESEND_ALWAYS, NULL);
        else
            g_object_set (gst_aravis->stream, "packet-resend", ARV_GV_STREAM_PACKET_RESEND_NEVER, NULL);
    }

    for (i = 0; i < GST_ARAVIS_N_BUFFERS; i++)
        arv_stream_push_buffer (gst_aravis->stream,
                                arv_buffer_new (gst_aravis->payload, NULL));

    GST_LOG_OBJECT (gst_aravis, "Start acquisition");
    arv_camera_start_acquisition (gst_aravis->camera);

    gst_aravis->timestamp_offset = 0;
    gst_aravis->last_timestamp = 0;

    return TRUE;
}
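For context, a sketch of how the aravissrc element containing this set_caps implementation could be placed in a GStreamer 0.10 pipeline. The element name aravissrc matches the plugin, but the property names set here ("gain", "exposure") are assumptions drawn from the struct fields above and should be verified with gst-inspect:

#include <gst/gst.h>

int
main (int argc, char **argv)
{
    GstElement *pipeline, *source, *sink;

    gst_init (&argc, &argv);

    pipeline = gst_pipeline_new ("aravis-pipeline");
    source = gst_element_factory_make ("aravissrc", "source");
    sink = gst_element_factory_make ("fakesink", "sink");

    /* Property names assumed from the gst_aravis fields used in set_caps. */
    g_object_set (source, "gain", 10.0, "exposure", 20000.0, NULL);

    gst_bin_add_many (GST_BIN (pipeline), source, sink, NULL);
    gst_element_link (source, sink);

    gst_element_set_state (pipeline, GST_STATE_PLAYING);
    /* ... run a GMainLoop here, then tear down ... */
    gst_element_set_state (pipeline, GST_STATE_NULL);
    gst_object_unref (pipeline);

    return 0;
}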
bool CameraGigeAravis::grabInitialization(){

    frameCounter = 0;

    payload = arv_camera_get_payload (camera);
    BOOST_LOG_SEV(logger, notification) << "Camera payload : " << payload;

    pixFormat = arv_camera_get_pixel_format(camera);

    arv_camera_get_exposure_time_bounds (camera, &exposureMin, &exposureMax);
    BOOST_LOG_SEV(logger, notification) << "Camera exposure bound min : " << exposureMin;
    BOOST_LOG_SEV(logger, notification) << "Camera exposure bound max : " << exposureMax;

    arv_camera_get_gain_bounds (camera, &gainMin, &gainMax);
    BOOST_LOG_SEV(logger, notification) << "Camera gain bound min : " << gainMin;
    BOOST_LOG_SEV(logger, notification) << "Camera gain bound max : " << gainMax;

    arv_camera_set_frame_rate(camera, 30);

    fps = arv_camera_get_frame_rate(camera);
    BOOST_LOG_SEV(logger, notification) << "Camera frame rate : " << fps;

    capsString = arv_pixel_format_to_gst_caps_string(pixFormat);
    BOOST_LOG_SEV(logger, notification) << "Camera format : " << capsString;

    gain = arv_camera_get_gain(camera);
    BOOST_LOG_SEV(logger, notification) << "Camera gain : " << gain;

    exp = arv_camera_get_exposure_time(camera);
    BOOST_LOG_SEV(logger, notification) << "Camera exposure : " << exp;

    cout << endl;
    cout << "DEVICE SELECTED : " << arv_camera_get_device_id(camera)   << endl;
    cout << "DEVICE NAME     : " << arv_camera_get_model_name(camera)  << endl;
    cout << "DEVICE VENDOR   : " << arv_camera_get_vendor_name(camera) << endl;
    cout << "PAYLOAD         : " << payload                            << endl;
    cout << "Width           : " << mWidth                             << endl
         << "Height          : " << mHeight                            << endl;
    cout << "Exp Range       : [" << exposureMin << " - " << exposureMax << "]" << endl;
    cout << "Exp             : " << exp                                << endl;
    cout << "Gain Range      : [" << gainMin << " - " << gainMax << "]" << endl;
    cout << "Gain            : " << gain                               << endl;
    cout << "Fps             : " << fps                                << endl;
    cout << "Type            : " << capsString                         << endl;
    cout << endl;

    // Create a new stream object. Open stream on camera.
    stream = arv_camera_create_stream(camera, NULL, NULL);

    if(stream == NULL){
        BOOST_LOG_SEV(logger, critical) << "Failed to create stream with arv_camera_create_stream()";
        return false;
    }

    if(ARV_IS_GV_STREAM(stream)){

        bool            arv_option_auto_socket_buffer = true;
        bool            arv_option_no_packet_resend = true;
        unsigned int    arv_option_packet_timeout = 20;     // milliseconds
        unsigned int    arv_option_frame_retention = 100;   // milliseconds

        if(arv_option_auto_socket_buffer){

            g_object_set(stream,
                         // ARV_GV_STREAM_SOCKET_BUFFER_FIXED : socket buffer is set to a given fixed value.
                         // ARV_GV_STREAM_SOCKET_BUFFER_AUTO : socket buffer is set with respect to the payload size.
                         "socket-buffer", ARV_GV_STREAM_SOCKET_BUFFER_AUTO,
                         // Socket buffer size, in bytes (0 lets Aravis derive it from the payload size).
                         "socket-buffer-size", 0,
                         NULL);
        }

        if(arv_option_no_packet_resend){

            // packet-resend : enables or disables the packet resend mechanism.
            //
            // If packet resend is disabled and a packet has been lost during transmission,
            // the grab result for the returned buffer holding the image will indicate that
            // the grab failed and the image will be incomplete.
            //
            // If packet resend is enabled and a packet has been lost during transmission,
            // a request is sent to the camera. If the camera still has the packet in its
            // buffer, it will resend the packet. If there are several lost packets in a
            // row, the resend requests will be combined.

            g_object_set(stream,
                         // ARV_GV_STREAM_PACKET_RESEND_NEVER : never request a packet resend.
                         // ARV_GV_STREAM_PACKET_RESEND_ALWAYS : request a packet resend if a packet was missing.
                         // Default value: ARV_GV_STREAM_PACKET_RESEND_ALWAYS
                         "packet-resend", ARV_GV_STREAM_PACKET_RESEND_NEVER,
                         NULL);
        }

        g_object_set(stream,
                     // packet-timeout : how long to wait for the next expected packet
                     // before initiating a resend request. The property is expressed in µs.
                     // Allowed values: [1000, 10000000], default value: 40000.
                     "packet-timeout", /*(unsigned) arv_option_packet_timeout * 1000*/ (unsigned) 40000,
                     // frame-retention : timeout of the frame retention timer. The timer starts
                     // when the frame leader is detected and resets after each received packet;
                     // if it expires before the last packet arrives, the buffer for the frame
                     // is released and flagged as an unsuccessful grab. The property is
                     // expressed in µs.
                     // Allowed values: [1000, 10000000], default value: 200000.
                     "frame-retention", /*(unsigned) arv_option_frame_retention * 1000*/ (unsigned) 200000,
                     NULL);

    }else
        return false;

    // Push 50 buffers into the stream input buffer queue.
    for(int i = 0; i < 50; i++)
        arv_stream_push_buffer(stream, arv_buffer_new(payload, NULL));

    return true;
}
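Once grabInitialization() returns true, a grab typically follows the pattern below. This is a sketch rather than the class's actual grab method; arv_buffer_get_status() is assumed to be available in the Aravis version in use, and the warning severity level is illustrative:

arv_camera_start_acquisition (camera);

/* Wait up to 2 s (timeout in µs) for a frame, check completeness, recycle the buffer. */
ArvBuffer *buffer = arv_stream_timeout_pop_buffer (stream, 2000000);

if (buffer != NULL) {

    if (arv_buffer_get_status (buffer) == ARV_BUFFER_STATUS_SUCCESS) {
        /* ... copy or convert the image data here ... */
    } else {
        /* With packet-resend disabled above, lost packets surface here
           as incomplete buffers. */
        BOOST_LOG_SEV(logger, warning) << "Incomplete frame grabbed.";
    }

    arv_stream_push_buffer (stream, buffer);
}

arv_camera_stop_acquisition (camera);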