bool CvCaptureCAM_Aravis::init_buffers()
{
    if (stream) {
        g_object_unref(stream);
        stream = NULL;
    }
    if ((stream = arv_camera_create_stream(camera, NULL, NULL))) {
        g_object_set(stream,
                     "socket-buffer", ARV_GV_STREAM_SOCKET_BUFFER_AUTO,
                     "socket-buffer-size", 0,
                     NULL);
        g_object_set(stream,
                     "packet-resend", ARV_GV_STREAM_PACKET_RESEND_NEVER,
                     NULL);
        g_object_set(stream,
                     "packet-timeout", (unsigned) 40000,
                     "frame-retention", (unsigned) 200000,
                     NULL);

        payload = arv_camera_get_payload(camera);
        for (int i = 0; i < num_buffers; i++)
            arv_stream_push_buffer(stream, arv_buffer_new(payload, NULL));

        return true;
    }
    return false;
}
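The buffers queued above are recycled by the capture path. A minimal sketch of the pop/requeue pattern that pairs with init_buffers() follows; arv_stream_timeout_pop_buffer() and arv_buffer_get_status() are real Aravis calls, but the body is illustrative, not the actual OpenCV implementation:

/* Hedged sketch: how buffers pushed in init_buffers() are typically
 * consumed. The 200 ms timeout and the body are assumptions. */
bool CvCaptureCAM_Aravis::grabFrame()
{
    /* wait up to 200 ms for a filled buffer */
    ArvBuffer *arv_buffer = arv_stream_timeout_pop_buffer(stream, 200000);
    if (!arv_buffer)
        return false;

    bool ok = (arv_buffer_get_status(arv_buffer) == ARV_BUFFER_STATUS_SUCCESS);
    /* ... copy or wrap the image data here ... */

    /* requeue the buffer so the stream thread can refill it */
    arv_stream_push_buffer(stream, arv_buffer);
    return ok;
}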
CamGigE::CamGigE(int id) : Cam(id)
{
    mWidth = XSIZE;
    mHeight = YSIZE;
    xshift = 0;
    yshift = 0;
    exposure = 2 * 1000;
    gain = 300;
    isCapturing = false;
    framerate = 15.0f;
    mId = id;

    g_type_init ();

    camera = arv_camera_new(NULL);
    if (camera == NULL) {
        throw "No camera found";
    }

    arv_camera_set_region(camera, xshift, yshift, mWidth, mHeight);
    arv_camera_set_exposure_time(camera, exposure);
    arv_camera_set_gain(camera, gain);
    arv_camera_set_pixel_format(camera, ARV_PIXEL_FORMAT_MONO_8);
    //arv_camera_set_pixel_format(camera, ARV_PIXEL_FORMAT_BAYER_BG_8);

    width = (int) mWidth;
    height = (int) mHeight;
    arv_camera_get_region (camera, &xshift, &yshift, &width, &height);

    payload = arv_camera_get_payload (camera);

    stream = arv_camera_create_stream (camera, NULL, NULL);
    if (stream == NULL) {
        throw "Cannot create stream";
    }

    g_object_set(stream,
                 "packet-timeout", (unsigned) 20 * 1000,
                 "frame-retention", (unsigned) 100 * 1000,
                 NULL);

    for (int i = 0; i < 4; i++)
        arv_stream_push_buffer(stream, arv_buffer_new(payload, NULL));

    arv_camera_set_acquisition_mode(camera, ARV_ACQUISITION_MODE_CONTINUOUS);
    arv_camera_set_frame_rate(camera, framerate);

    rawdata = new unsigned char[width * height];
}
static unicap_status_t
aravis_capture_start (aravis_handle_t handle)
{
    guint payload;
    int i;

    handle->stream = arv_camera_create_stream (handle->camera, aravis_stream_callback, handle);
    /* Bail out before touching the stream: the original checked for NULL only
     * after pushing buffers into it. */
    if (handle->stream == NULL)
        return STATUS_FAILURE;

    arv_camera_set_acquisition_mode (handle->camera, ARV_ACQUISITION_MODE_CONTINUOUS);
    arv_camera_start_acquisition (handle->camera);

    payload = arv_camera_get_payload (handle->camera);
    for (i = 0; i < 8; i++)
        arv_stream_push_buffer (handle->stream, arv_buffer_new (payload, NULL));

    return STATUS_SUCCESS;
}
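The snippet passes aravis_stream_callback to arv_camera_create_stream() but never shows it. A minimal sketch of what such a callback can look like; the ArvStreamCallbackType values are real Aravis API, while the body and the delivery step are assumptions, not the unicap plugin's actual code:

/* Hedged sketch of the stream callback referenced above. */
static void
aravis_stream_callback (void *user_data, ArvStreamCallbackType type, ArvBuffer *buffer)
{
    aravis_handle_t handle = user_data;

    switch (type) {
    case ARV_STREAM_CALLBACK_TYPE_BUFFER_DONE:
        /* A frame is complete: hand it to the consumer, then requeue it
         * so the stream thread can refill it. */
        /* ... deliver buffer to unicap here ... */
        arv_stream_push_buffer (handle->stream, buffer);
        break;
    default:
        break;
    }
}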
static void
preallocated_buffer_test (void)
{
    ArvBuffer *buffer;
    void *data = g_malloc (1024);

    buffer = arv_buffer_new (1024, data);
    g_assert (ARV_IS_BUFFER (buffer));
    g_assert (buffer->data == data);
    g_assert (buffer->user_data == NULL);
    g_assert (buffer->user_data_destroy_func == NULL);
    g_assert (buffer->status == ARV_BUFFER_STATUS_CLEARED);

    g_object_unref (buffer);
}
ArvGvStream *CreateStream(void)
{
    gboolean bAutoBuffer = FALSE;
    gboolean bPacketResend = TRUE;
    unsigned int timeoutPacket = 40;          // milliseconds
    unsigned int timeoutFrameRetention = 200; // milliseconds

    ArvGvStream *pStream = (ArvGvStream *) arv_device_create_stream (global.pDevice, NULL, NULL);
    if (pStream)
    {
        ArvBuffer *pBuffer;
        gint nbytesPayload;

        if (!ARV_IS_GV_STREAM (pStream))
            ROS_WARN("Stream is not a GV_STREAM");

        if (bAutoBuffer)
            g_object_set (pStream,
                          "socket-buffer", ARV_GV_STREAM_SOCKET_BUFFER_AUTO,
                          "socket-buffer-size", 0,
                          NULL);

        // Packet resend is only set explicitly when it is being disabled;
        // otherwise the stream keeps its default resend behavior.
        if (!bPacketResend)
            g_object_set (pStream,
                          "packet-resend", ARV_GV_STREAM_PACKET_RESEND_NEVER,
                          NULL);

        g_object_set (pStream,
                      "packet-timeout", (unsigned) timeoutPacket * 1000,
                      "frame-retention", (unsigned) timeoutFrameRetention * 1000,
                      NULL);

        // Load up some buffers.
        nbytesPayload = arv_camera_get_payload (global.pCamera);
        for (int i = 0; i < 50; i++)
        {
            pBuffer = arv_buffer_new (nbytesPayload, NULL);
            arv_stream_push_buffer ((ArvStream *) pStream, pBuffer);
        }
    }
    return pStream;
} // CreateStream()
static void
simple_buffer_test (void)
{
    ArvBuffer *buffer;

    buffer = arv_buffer_new (1024, NULL);
    g_assert (ARV_IS_BUFFER (buffer));
    g_assert (buffer->data != NULL);
    g_assert (buffer->size == 1024);
    g_assert (buffer->user_data == NULL);
    g_assert (buffer->user_data_destroy_func == NULL);
    g_assert (buffer->status == ARV_BUFFER_STATUS_CLEARED);

    g_object_unref (buffer);
}
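For completeness, GLib-style unit tests like the two above are registered and run through the standard GLib test harness. A minimal sketch; g_test_init(), g_test_add_func() and g_test_run() are standard GLib API, and the test paths are assumptions:

/* Hedged sketch of a test harness for the two buffer tests above. */
int
main (int argc, char *argv[])
{
    g_test_init (&argc, &argv, NULL);

    g_test_add_func ("/buffer/simple-buffer", simple_buffer_test);
    g_test_add_func ("/buffer/preallocated-buffer", preallocated_buffer_test);

    return g_test_run ();
}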
int main(int argc, char *argv[])
{
    ArvDevice *device;
    ArvStream *stream;
    ArvCamera *camera;
    ArvGcFeatureNode *feature;
    guint64 n_completed_buffers;
    guint64 n_failures;
    guint64 n_underruns;
    GOptionContext *context;
    GError *error = NULL;
    void (*old_sigint_handler)(int);
    int i, payload;

    arv_g_thread_init (NULL);
    arv_g_type_init ();

    context = g_option_context_new (NULL);
    g_option_context_set_summary (context, "Test of heartbeat robustness while continuously changing a feature.");
    g_option_context_add_main_entries (context, arv_option_entries, NULL);

    if (!g_option_context_parse (context, &argc, &argv, &error)) {
        g_option_context_free (context);
        g_print ("Option parsing failed: %s\n", error->message);
        g_error_free (error);
        return EXIT_FAILURE;
    }
    g_option_context_free (context);

    arv_debug_enable (arv_option_debug_domains);

    camera = arv_camera_new (arv_option_camera_name);
    if (!ARV_IS_CAMERA (camera)) {
        printf ("Device not found\n");
        return EXIT_FAILURE;
    }

    device = arv_camera_get_device (camera);

    stream = arv_camera_create_stream (camera, NULL, NULL);
    if (!ARV_IS_STREAM (stream)) {
        printf ("Invalid device\n");
    } else {
        payload = arv_camera_get_payload (camera);

        if (ARV_IS_GV_STREAM (stream)) {
            g_object_set (stream,
                          //"socket-buffer", ARV_GV_STREAM_SOCKET_BUFFER_AUTO,
                          "socket-buffer", ARV_GV_STREAM_SOCKET_BUFFER_FIXED,
                          "socket-buffer-size", payload * 6,
                          "packet-timeout", 1000 * 1000,
                          "frame-retention", 100 * 1000,
                          "packet-resend", ARV_GV_STREAM_PACKET_RESEND_ALWAYS,
                          NULL);
        }

        for (i = 0; i < 100; i++)
            arv_stream_push_buffer (stream, arv_buffer_new (payload, NULL));

        arv_camera_set_acquisition_mode (camera, ARV_ACQUISITION_MODE_CONTINUOUS);

        feature = ARV_GC_FEATURE_NODE (arv_device_get_feature (device, arv_option_feature_name));

        arv_camera_start_acquisition (camera);

        old_sigint_handler = signal (SIGINT, set_cancel);

        while (!cancel) {
            ArvBuffer *buffer = arv_stream_timeout_pop_buffer (stream, 2000000);
            if (buffer) {
                usleep (10);
                arv_stream_push_buffer (stream, buffer);
            }

            /* Every 5 iterations, toggle the feature between its min and max value. */
            if (!(++i % 5)) {
                char *value;

                if ((i / 100) % 2 == 0)
                    value = g_strdup_printf ("%d", arv_option_min);
                else
                    value = g_strdup_printf ("%d", arv_option_max);

                fprintf (stderr, "Setting %s from %s to %s\n",
                         arv_option_feature_name,
                         arv_gc_feature_node_get_value_as_string (feature, NULL),
                         value);
                arv_gc_feature_node_set_value_from_string (feature, value, NULL);
                g_free (value);
            }
        }

        signal (SIGINT, old_sigint_handler);

        arv_stream_get_statistics (stream, &n_completed_buffers, &n_failures, &n_underruns);

        printf ("\nCompleted buffers = %llu\n", (unsigned long long) n_completed_buffers);
        printf ("Failures          = %llu\n", (unsigned long long) n_failures);
        printf ("Underruns         = %llu\n", (unsigned long long) n_underruns);

        arv_camera_stop_acquisition (camera);
    }

    g_object_unref (camera);

    return 0;
}
static gboolean
gst_aravis_set_caps (GstBaseSrc *src, GstCaps *caps)
{
    GstAravis* gst_aravis = GST_ARAVIS(src);
    GstStructure *structure;
    ArvPixelFormat pixel_format;
    int height, width;
    int bpp, depth;
    const GValue *frame_rate;
    const char *caps_string;
    unsigned int i;
    guint32 fourcc;

    GST_LOG_OBJECT (gst_aravis, "Requested caps = %" GST_PTR_FORMAT, caps);

    arv_camera_stop_acquisition (gst_aravis->camera);

    if (gst_aravis->stream != NULL)
        g_object_unref (gst_aravis->stream);

    structure = gst_caps_get_structure (caps, 0);

    gst_structure_get_int (structure, "width", &width);
    gst_structure_get_int (structure, "height", &height);
    frame_rate = gst_structure_get_value (structure, "framerate");
    gst_structure_get_int (structure, "bpp", &bpp);
    gst_structure_get_int (structure, "depth", &depth);

    if (gst_structure_get_field_type (structure, "format") == G_TYPE_STRING) {
        const char *string;

        string = gst_structure_get_string (structure, "format");
        fourcc = GST_STR_FOURCC (string);
    } else if (gst_structure_get_field_type (structure, "format") == GST_TYPE_FOURCC) {
        gst_structure_get_fourcc (structure, "format", &fourcc);
    } else
        fourcc = 0;

    pixel_format = arv_pixel_format_from_gst_0_10_caps (gst_structure_get_name (structure), bpp, depth, fourcc);

    arv_camera_set_region (gst_aravis->camera, gst_aravis->offset_x, gst_aravis->offset_y, width, height);
    arv_camera_set_binning (gst_aravis->camera, gst_aravis->h_binning, gst_aravis->v_binning);
    arv_camera_set_pixel_format (gst_aravis->camera, pixel_format);

    if (frame_rate != NULL) {
        double dbl_frame_rate;

        dbl_frame_rate = (double) gst_value_get_fraction_numerator (frame_rate) /
                         (double) gst_value_get_fraction_denominator (frame_rate);

        GST_DEBUG_OBJECT (gst_aravis, "Frame rate = %g Hz", dbl_frame_rate);
        arv_camera_set_frame_rate (gst_aravis->camera, dbl_frame_rate);

        if (dbl_frame_rate > 0.0)
            gst_aravis->buffer_timeout_us = MAX (GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT,
                                                 3e6 / dbl_frame_rate);
        else
            gst_aravis->buffer_timeout_us = GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT;
    } else
        gst_aravis->buffer_timeout_us = GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT;

    GST_DEBUG_OBJECT (gst_aravis, "Buffer timeout = %" G_GUINT64_FORMAT " µs", gst_aravis->buffer_timeout_us);

    GST_DEBUG_OBJECT (gst_aravis, "Actual frame rate = %g Hz", arv_camera_get_frame_rate (gst_aravis->camera));

    if (gst_aravis->gain_auto) {
        arv_camera_set_gain_auto (gst_aravis->camera, ARV_AUTO_CONTINUOUS);
        GST_DEBUG_OBJECT (gst_aravis, "Auto Gain = continuous");
    } else {
        if (gst_aravis->gain >= 0) {
            GST_DEBUG_OBJECT (gst_aravis, "Gain = %g", gst_aravis->gain);
            arv_camera_set_gain_auto (gst_aravis->camera, ARV_AUTO_OFF);
            arv_camera_set_gain (gst_aravis->camera, gst_aravis->gain);
        }
        GST_DEBUG_OBJECT (gst_aravis, "Actual gain = %g", arv_camera_get_gain (gst_aravis->camera));
    }

    if (gst_aravis->exposure_auto) {
        arv_camera_set_exposure_time_auto (gst_aravis->camera, ARV_AUTO_CONTINUOUS);
        GST_DEBUG_OBJECT (gst_aravis, "Auto Exposure = continuous");
    } else {
        if (gst_aravis->exposure_time_us > 0.0) {
            GST_DEBUG_OBJECT (gst_aravis, "Exposure = %g µs", gst_aravis->exposure_time_us);
            arv_camera_set_exposure_time_auto (gst_aravis->camera, ARV_AUTO_OFF);
            arv_camera_set_exposure_time (gst_aravis->camera, gst_aravis->exposure_time_us);
        }
        GST_DEBUG_OBJECT (gst_aravis, "Actual exposure = %g µs", arv_camera_get_exposure_time (gst_aravis->camera));
    }

    if (gst_aravis->fixed_caps != NULL)
        gst_caps_unref (gst_aravis->fixed_caps);

    caps_string = arv_pixel_format_to_gst_0_10_caps_string (pixel_format);
    if (caps_string != NULL) {
        GstStructure *structure;
        GstCaps *caps;

        caps = gst_caps_new_empty ();
        structure = gst_structure_from_string (caps_string, NULL);
        gst_structure_set (structure,
                           "width", G_TYPE_INT, width,
                           "height", G_TYPE_INT, height,
                           NULL);

        if (frame_rate != NULL)
            gst_structure_set_value (structure, "framerate", frame_rate);

        gst_caps_append_structure (caps, structure);

        gst_aravis->fixed_caps = caps;
    } else
        gst_aravis->fixed_caps = NULL;

    gst_aravis->payload = arv_camera_get_payload (gst_aravis->camera);
    gst_aravis->stream = arv_camera_create_stream (gst_aravis->camera, NULL, NULL);

    /* "packet-resend" is a GV stream property; only set it on GV streams
     * (the original also touched it on non-GV streams, which triggers a
     * GObject warning). */
    if (ARV_IS_GV_STREAM (gst_aravis->stream)) {
        if (gst_aravis->packet_resend)
            g_object_set (gst_aravis->stream, "packet-resend", ARV_GV_STREAM_PACKET_RESEND_ALWAYS, NULL);
        else
            g_object_set (gst_aravis->stream, "packet-resend", ARV_GV_STREAM_PACKET_RESEND_NEVER, NULL);
    }

    for (i = 0; i < GST_ARAVIS_N_BUFFERS; i++)
        arv_stream_push_buffer (gst_aravis->stream,
                                arv_buffer_new (gst_aravis->payload, NULL));

    GST_LOG_OBJECT (gst_aravis, "Start acquisition");
    arv_camera_start_acquisition (gst_aravis->camera);

    gst_aravis->timestamp_offset = 0;
    gst_aravis->last_timestamp = 0;

    return TRUE;
}
void *
arv_fake_gv_camera_thread (void *user_data)
{
    ArvFakeGvCamera *gv_camera = user_data;
    ArvBuffer *image_buffer = NULL;
    GError *error = NULL;
    GSocketAddress *stream_address = NULL;
    void *packet_buffer;
    size_t packet_size;
    size_t payload = 0;
    guint16 block_id;
    ptrdiff_t offset;
    guint32 gv_packet_size;

    packet_buffer = g_malloc (ARV_FAKE_GV_CAMERA_BUFFER_SIZE);

    do {
        if (arv_fake_camera_get_control_channel_privilege (gv_camera->camera) == 0 ||
            arv_fake_camera_get_acquisition_status (gv_camera->camera) == 0) {
            if (stream_address != NULL) {
                g_object_unref (stream_address);
                stream_address = NULL;
                g_object_unref (image_buffer);
                image_buffer = NULL;
                arv_debug_stream_thread ("[FakeGvCamera::stream_thread] Stop stream");
            }
            g_usleep (100000);
        } else {
            if (stream_address == NULL) {
                GInetAddress *inet_address;
                char *inet_address_string;

                stream_address = arv_fake_camera_get_stream_address (gv_camera->camera);
                inet_address = g_inet_socket_address_get_address (G_INET_SOCKET_ADDRESS (stream_address));
                inet_address_string = g_inet_address_to_string (inet_address);
                arv_debug_stream_thread ("[FakeGvCamera::stream_thread] Start stream to %s (%d)",
                                         inet_address_string,
                                         g_inet_socket_address_get_port (G_INET_SOCKET_ADDRESS (stream_address)));
                g_free (inet_address_string);

                payload = arv_fake_camera_get_payload (gv_camera->camera);
                image_buffer = arv_buffer_new (payload, NULL);
            }

            arv_fake_camera_wait_for_next_frame (gv_camera->camera);
            arv_fake_camera_fill_buffer (gv_camera->camera, image_buffer, &gv_packet_size);

            /* Send the GVSP data leader packet. */
            block_id = 0;
            packet_size = ARV_FAKE_GV_CAMERA_BUFFER_SIZE;
            arv_gvsp_packet_new_data_leader (image_buffer->frame_id,
                                             block_id,
                                             image_buffer->timestamp_ns,
                                             image_buffer->pixel_format,
                                             image_buffer->width, image_buffer->height,
                                             image_buffer->x_offset, image_buffer->y_offset,
                                             packet_buffer, &packet_size);
            g_socket_send_to (gv_camera->gvsp_socket, stream_address,
                              packet_buffer, packet_size, NULL, &error);
            if (error != NULL) {
                arv_warning_stream_thread ("[ArvFakeGvCamera::stream_thread] Socket send error [%s]",
                                           error->message);
                g_error_free (error);
                error = NULL;
            }
            block_id++;

            /* Send the image payload as a sequence of data block packets. */
            offset = 0;
            while (offset < payload) {
                size_t data_size;

                data_size = MIN (gv_packet_size - ARV_GVSP_PACKET_PROTOCOL_OVERHEAD,
                                 payload - offset);

                packet_size = ARV_FAKE_GV_CAMERA_BUFFER_SIZE;
                arv_gvsp_packet_new_data_block (image_buffer->frame_id, block_id,
                                                data_size,
                                                ((char *) image_buffer->data) + offset,
                                                packet_buffer, &packet_size);
                g_socket_send_to (gv_camera->gvsp_socket, stream_address,
                                  packet_buffer, packet_size, NULL, NULL);

                offset += data_size;
                block_id++;
            }

            /* Send the GVSP data trailer packet. */
            packet_size = ARV_FAKE_GV_CAMERA_BUFFER_SIZE;
            arv_gvsp_packet_new_data_trailer (image_buffer->frame_id, block_id,
                                              packet_buffer, &packet_size);
            g_socket_send_to (gv_camera->gvsp_socket, stream_address,
                              packet_buffer, packet_size, NULL, NULL);
        }
    } while (!cancel);

    if (stream_address != NULL)
        g_object_unref (stream_address);
    if (image_buffer != NULL)
        g_object_unref (image_buffer);

    g_free (packet_buffer);

    return NULL;
}
int main (int argc, char **argv)
{
    ArvCamera *camera;
    ArvStream *stream;
    ArvBuffer *buffer;
    GOptionContext *context;
    GError *error = NULL;
    int i;

    arv_g_thread_init (NULL);
    arv_g_type_init ();

    context = g_option_context_new (NULL);
    g_option_context_add_main_entries (context, arv_option_entries, NULL);

    if (!g_option_context_parse (context, &argc, &argv, &error)) {
        g_option_context_free (context);
        g_print ("Option parsing failed: %s\n", error->message);
        g_error_free (error);
        return EXIT_FAILURE;
    }
    g_option_context_free (context);

    if (arv_option_max_frames < 0)
        arv_option_max_errors_before_abort = -1;

    save_buffer_fn = GetSaveBufferFn (arv_option_save_type);

    arv_debug_enable (arv_option_debug_domains);

    if (arv_option_camera_name == NULL)
        g_print ("Looking for the first available camera\n");
    else
        g_print ("Looking for camera '%s'\n", arv_option_camera_name);

    camera = arv_camera_new (arv_option_camera_name);

    int errors = 0;

    if (camera == NULL) {
        g_print ("No device found\n");
        return 1;
    }

    guint payload_size = arv_camera_get_payload (camera);
    g_print ("payload size = %d (0x%x)\n", payload_size, payload_size);

    stream = arv_camera_create_stream (camera, NULL, NULL);
    if (arv_option_auto_buffer) {
        g_object_set (stream,
                      "socket-buffer", ARV_GV_STREAM_SOCKET_BUFFER_AUTO,
                      "socket-buffer-size", 0,
                      NULL);
    }

    for (i = 0; i < 30; i++)
        arv_stream_push_buffer (stream, arv_buffer_new (payload_size, NULL));

    arv_camera_stop_acquisition (camera);

    /* Set the bit depth. */
    ArvDevice *device = arv_camera_get_device (camera);
    ArvGcNode *feature = arv_device_get_feature (device, "PixelFormat");
    const char *pix_format = "Mono8";
    if (arv_option_pixel_format == 14)
        pix_format = "Mono14";
    arv_gc_feature_node_set_value_from_string (ARV_GC_FEATURE_NODE (feature), pix_format, NULL);

    if (arv_option_pixel_format == 14) {
        feature = arv_device_get_feature (device, "CMOSBitDepth");
        arv_gc_feature_node_set_value_from_string (ARV_GC_FEATURE_NODE (feature), "bit14bit", NULL);
    }

    signal (SIGINT, set_cancel);
    signal (SIGQUIT, set_cancel);

    int captured_frames = 0;
    guint64 timeout = 1000000;

#define _CAN_STOP (arv_option_max_frames > 0 && captured_frames >= arv_option_max_frames)

    arv_camera_start_acquisition (camera);

    do {
        g_usleep (100000);
        do {
            buffer = arv_stream_timeout_pop_buffer (stream, timeout);
            if (buffer == NULL)
                break;

            ArvBufferStatus status = arv_buffer_get_status (buffer);
            fprintf (stderr, "Status is %d\n", status);

            if (status == ARV_BUFFER_STATUS_SUCCESS) {
                if (timeout > 100000)
                    timeout -= 1000;
                errors = 0;

                if (save_buffer_fn != NULL) {
                    struct timespec timestamp;
                    clock_gettime (CLOCK_REALTIME, &timestamp);

                    char filename[BUFSIZ];
                    if (strcmp (arv_option_save_prefix, "") != 0) {
                        sprintf (filename, "%s/%s%d.%s",
                                 arv_option_save_dir, arv_option_save_prefix,
                                 captured_frames, arv_option_save_type);
                    } else {
                        sprintf (filename, "%s/%d.%03ld.%s",
                                 arv_option_save_dir,
                                 (int) timestamp.tv_sec,
                                 (long) (timestamp.tv_nsec / 1.0e6),
                                 arv_option_save_type);
                    }

                    if ((*save_buffer_fn) (buffer, filename) == false) {
                        g_print ("Couldn't save frame %d to %s\n", captured_frames, filename);
                        set_cancel (SIGQUIT);
                    }
                    g_print ("Saved frame %d to %s\n", captured_frames, filename);

                    /* Keep a "latest.<ext>" symlink pointing at the newest frame.
                     * The buffer is sized for arbitrary extensions, not just "png". */
                    char latest[BUFSIZ];
                    snprintf (latest, sizeof latest, "latest.%s", arv_option_save_type);
                    unlink (latest);
                    symlink (filename, latest);
                }

                captured_frames++;
                g_usleep (arv_option_sample_period);
            } else {
                if (timeout < 10000000)
                    timeout += 1000;

                fprintf (stderr, "%d errors out of %d allowed\n",
                         errors, arv_option_max_errors_before_abort);

                arv_camera_stop_acquisition (camera);
                if (++errors > arv_option_max_errors_before_abort &&
                    arv_option_max_errors_before_abort >= 0) {
                    set_cancel (SIGQUIT);
                } else {
                    arv_camera_start_acquisition (camera);
                }
            }

            arv_stream_push_buffer (stream, buffer);
        } while (!cancel && buffer != NULL && !_CAN_STOP);
    } while (!cancel && !_CAN_STOP);

    arv_camera_stop_acquisition (camera);

    guint64 n_processed_buffers;
    guint64 n_failures;
    guint64 n_underruns;

    arv_stream_get_statistics (stream, &n_processed_buffers, &n_failures, &n_underruns);
    g_print ("Processed buffers = %llu\n", (unsigned long long) n_processed_buffers);
    g_print ("Failures          = %llu\n", (unsigned long long) n_failures);
    g_print ("Underruns         = %llu\n", (unsigned long long) n_underruns);

    g_object_unref (stream);
    g_object_unref (camera);

    return (errors > 0);
}
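Both this tool and the heartbeat test above rely on a set_cancel signal handler and a cancel flag that are not shown. A minimal sketch, following the pattern used throughout the Aravis test programs; the bodies are illustrative:

/* Hedged sketch of the cancellation machinery the tools above assume. */
static gboolean cancel = FALSE;

static void
set_cancel (int signal)
{
    /* Only async-signal-safe work here: set a flag the main loop polls. */
    cancel = TRUE;
}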
static gboolean
gst_aravis_set_caps (GstBaseSrc *src, GstCaps *caps)
{
    GstAravis* gst_aravis = GST_ARAVIS(src);
    GstStructure *structure;
    ArvPixelFormat pixel_format;
    int height, width;
    int bpp, depth;
    const GValue *frame_rate;
    const char *caps_string;
    unsigned int i;
    guint32 fourcc;

    GST_LOG_OBJECT (gst_aravis, "Requested caps = %" GST_PTR_FORMAT, caps);

    arv_camera_stop_acquisition (gst_aravis->camera);

    if (gst_aravis->stream != NULL)
        g_object_unref (gst_aravis->stream);

    structure = gst_caps_get_structure (caps, 0);

    gst_structure_get_int (structure, "width", &width);
    gst_structure_get_int (structure, "height", &height);
    frame_rate = gst_structure_get_value (structure, "framerate");
    gst_structure_get_fourcc (structure, "format", &fourcc);
    gst_structure_get_int (structure, "bpp", &bpp);
    gst_structure_get_int (structure, "depth", &depth);

    pixel_format = arv_pixel_format_from_gst_caps (gst_structure_get_name (structure), bpp, depth, fourcc);

    arv_camera_set_region (gst_aravis->camera, 0, 0, width, height);
    arv_camera_set_binning (gst_aravis->camera, gst_aravis->h_binning, gst_aravis->v_binning);
    arv_camera_set_pixel_format (gst_aravis->camera, pixel_format);

    if (frame_rate != NULL) {
        double dbl_frame_rate;

        dbl_frame_rate = (double) gst_value_get_fraction_numerator (frame_rate) /
                         (double) gst_value_get_fraction_denominator (frame_rate);

        GST_DEBUG_OBJECT (gst_aravis, "Frame rate = %g Hz", dbl_frame_rate);
        arv_camera_set_frame_rate (gst_aravis->camera, dbl_frame_rate);

        if (dbl_frame_rate > 0.0)
            gst_aravis->buffer_timeout_us = MAX (GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT,
                                                 3e6 / dbl_frame_rate);
        else
            gst_aravis->buffer_timeout_us = GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT;
    } else
        gst_aravis->buffer_timeout_us = GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT;

    GST_DEBUG_OBJECT (gst_aravis, "Buffer timeout = %" G_GUINT64_FORMAT " µs", gst_aravis->buffer_timeout_us);

    GST_DEBUG_OBJECT (gst_aravis, "Actual frame rate = %g Hz", arv_camera_get_frame_rate (gst_aravis->camera));

    GST_DEBUG_OBJECT (gst_aravis, "Gain = %d", gst_aravis->gain);
    arv_camera_set_gain (gst_aravis->camera, gst_aravis->gain);
    GST_DEBUG_OBJECT (gst_aravis, "Actual gain = %d", arv_camera_get_gain (gst_aravis->camera));

    GST_DEBUG_OBJECT (gst_aravis, "Exposure = %g µs", gst_aravis->exposure_time_us);
    arv_camera_set_exposure_time (gst_aravis->camera, gst_aravis->exposure_time_us);
    GST_DEBUG_OBJECT (gst_aravis, "Actual exposure = %g µs", arv_camera_get_exposure_time (gst_aravis->camera));

    if (gst_aravis->fixed_caps != NULL)
        gst_caps_unref (gst_aravis->fixed_caps);

    caps_string = arv_pixel_format_to_gst_caps_string (pixel_format);
    if (caps_string != NULL) {
        GstStructure *structure;
        GstCaps *caps;

        caps = gst_caps_new_empty ();
        structure = gst_structure_from_string (caps_string, NULL);
        gst_structure_set (structure,
                           "width", G_TYPE_INT, width,
                           "height", G_TYPE_INT, height,
                           NULL);

        if (frame_rate != NULL)
            gst_structure_set_value (structure, "framerate", frame_rate);

        gst_caps_append_structure (caps, structure);

        gst_aravis->fixed_caps = caps;
    } else
        gst_aravis->fixed_caps = NULL;

    gst_aravis->payload = arv_camera_get_payload (gst_aravis->camera);
    gst_aravis->stream = arv_camera_create_stream (gst_aravis->camera, NULL, NULL);

    for (i = 0; i < GST_ARAVIS_N_BUFFERS; i++)
        arv_stream_push_buffer (gst_aravis->stream,
                                arv_buffer_new (gst_aravis->payload, NULL));

    GST_LOG_OBJECT (gst_aravis, "Start acquisition");
    arv_camera_start_acquisition (gst_aravis->camera);

    gst_aravis->timestamp_offset = 0;
    gst_aravis->last_timestamp = 0;

    return TRUE;
}
bool CameraGigeAravis::grabInitialization(){

    frameCounter = 0;

    payload = arv_camera_get_payload (camera);
    BOOST_LOG_SEV(logger, notification) << "Camera payload : " << payload;

    pixFormat = arv_camera_get_pixel_format(camera);

    arv_camera_get_exposure_time_bounds (camera, &exposureMin, &exposureMax);
    BOOST_LOG_SEV(logger, notification) << "Camera exposure bound min : " << exposureMin;
    BOOST_LOG_SEV(logger, notification) << "Camera exposure bound max : " << exposureMax;

    arv_camera_get_gain_bounds (camera, &gainMin, &gainMax);
    BOOST_LOG_SEV(logger, notification) << "Camera gain bound min : " << gainMin;
    BOOST_LOG_SEV(logger, notification) << "Camera gain bound max : " << gainMax;

    arv_camera_set_frame_rate(camera, 30);

    fps = arv_camera_get_frame_rate(camera);
    BOOST_LOG_SEV(logger, notification) << "Camera frame rate : " << fps;

    capsString = arv_pixel_format_to_gst_caps_string(pixFormat);
    BOOST_LOG_SEV(logger, notification) << "Camera format : " << capsString;

    gain = arv_camera_get_gain(camera);
    BOOST_LOG_SEV(logger, notification) << "Camera gain : " << gain;

    exp = arv_camera_get_exposure_time(camera);
    BOOST_LOG_SEV(logger, notification) << "Camera exposure : " << exp;

    cout << endl;
    cout << "DEVICE SELECTED : " << arv_camera_get_device_id(camera) << endl;
    cout << "DEVICE NAME     : " << arv_camera_get_model_name(camera) << endl;
    cout << "DEVICE VENDOR   : " << arv_camera_get_vendor_name(camera) << endl;
    cout << "PAYLOAD         : " << payload << endl;
    cout << "Width           : " << mWidth << endl
         << "Height          : " << mHeight << endl;
    cout << "Exp Range       : [" << exposureMin << " - " << exposureMax << "]" << endl;
    cout << "Exp             : " << exp << endl;
    cout << "Gain Range      : [" << gainMin << " - " << gainMax << "]" << endl;
    cout << "Gain            : " << gain << endl;
    cout << "Fps             : " << fps << endl;
    cout << "Type            : " << capsString << endl;
    cout << endl;

    // Create a new stream object. Open stream on Camera.
    stream = arv_camera_create_stream(camera, NULL, NULL);

    if(stream == NULL){
        BOOST_LOG_SEV(logger, critical) << "Fail to create stream with arv_camera_create_stream()";
        return false;
    }

    if(ARV_IS_GV_STREAM(stream)){

        bool            arv_option_auto_socket_buffer   = true;
        bool            arv_option_no_packet_resend     = true;
        unsigned int    arv_option_packet_timeout       = 20;   // ms
        unsigned int    arv_option_frame_retention      = 100;  // ms

        if(arv_option_auto_socket_buffer){
            g_object_set(stream,
                         // ARV_GV_STREAM_SOCKET_BUFFER_FIXED : socket buffer is set to a given fixed value.
                         // ARV_GV_STREAM_SOCKET_BUFFER_AUTO  : socket buffer is set with respect to the payload size.
                         "socket-buffer", ARV_GV_STREAM_SOCKET_BUFFER_AUTO,
                         // Socket buffer size, in bytes (0 lets the auto mode pick the size).
                         "socket-buffer-size", 0,
                         NULL);
        }

        if(arv_option_no_packet_resend){
            // packet-resend : enables or disables the packet resend mechanism.
            //
            // If packet resend is disabled and a packet is lost during transmission,
            // the grab result for the returned buffer holding the image will indicate
            // that the grab failed and the image will be incomplete.
            //
            // If packet resend is enabled and a packet is lost during transmission,
            // a request is sent to the camera. If the camera still has the packet in
            // its buffer, it resends the packet. If several packets are lost in a
            // row, the resend requests are combined.
            g_object_set(stream,
                         // ARV_GV_STREAM_PACKET_RESEND_NEVER  : never request a packet resend
                         // ARV_GV_STREAM_PACKET_RESEND_ALWAYS : request a packet resend if a packet is missing
                         // Default value: ARV_GV_STREAM_PACKET_RESEND_ALWAYS
                         "packet-resend", ARV_GV_STREAM_PACKET_RESEND_NEVER,
                         NULL);
        }

        g_object_set(stream,
                     // packet-timeout : how long to wait for the next expected packet
                     // before a resend request is initiated. Expressed in µs.
                     // Allowed values: [1000, 10000000]; default: 40000.
                     "packet-timeout", /*(unsigned) arv_option_packet_timeout * 1000*/ (unsigned) 40000,
                     // frame-retention : timeout of the frame retention timer. The timer
                     // starts when the frame leader is detected and resets after each
                     // received packet; if it expires before the last packet arrives,
                     // the buffer is released and flagged as an unsuccessful grab.
                     // Expressed in µs. Allowed values: [1000, 10000000]; default: 200000.
                     "frame-retention", /*(unsigned) arv_option_frame_retention * 1000*/ (unsigned) 200000,
                     NULL);

    }else
        return false;

    // Push 50 buffers into the stream input buffer queue.
    for(int i = 0; i < 50; i++)
        arv_stream_push_buffer(stream, arv_buffer_new(payload, NULL));

    return true;

}
void *
_thread (void *user_data)
{
    ArvGvFakeCamera *gv_fake_camera = user_data;
    ArvBuffer *image_buffer = NULL;
    GError *error = NULL;
    GSocketAddress *stream_address = NULL;
    void *packet_buffer;
    size_t packet_size;
    size_t payload = 0;
    guint16 block_id;
    ptrdiff_t offset;
    guint32 gv_packet_size;
    GInputVector input_vector;
    int n_events;
    gboolean is_streaming = FALSE;

    input_vector.buffer = g_malloc0 (ARV_GV_FAKE_CAMERA_BUFFER_SIZE);
    input_vector.size = ARV_GV_FAKE_CAMERA_BUFFER_SIZE;

    packet_buffer = g_malloc (ARV_GV_FAKE_CAMERA_BUFFER_SIZE);

    do {
        guint64 next_timestamp_us;
        guint64 sleep_time_us;

        if (is_streaming) {
            sleep_time_us = arv_fake_camera_get_sleep_time_for_next_frame (gv_fake_camera->priv->camera,
                                                                           &next_timestamp_us);
        } else {
            sleep_time_us = 100000;
            next_timestamp_us = g_get_real_time () + sleep_time_us;
        }

        /* Handle control traffic until it is time to emit the next frame. */
        do {
            gint timeout_ms;

            timeout_ms = MIN (100, (next_timestamp_us - g_get_real_time ()) / 1000LL);
            if (timeout_ms < 0)
                timeout_ms = 0;

            n_events = g_poll (gv_fake_camera->priv->gvcp_fds, 2, timeout_ms);
            if (n_events > 0) {
                GSocketAddress *remote_address = NULL;
                int count;

                count = g_socket_receive_message (gv_fake_camera->priv->gvcp_socket,
                                                  &remote_address, &input_vector, 1,
                                                  NULL, NULL, NULL, NULL, NULL);
                if (count > 0) {
                    if (handle_control_packet (gv_fake_camera, gv_fake_camera->priv->gvcp_socket,
                                               remote_address, input_vector.buffer, count))
                        arv_debug_device ("[GvFakeCamera::thread] Control packet received");
                }
                g_clear_object (&remote_address);

                if (gv_fake_camera->priv->discovery_socket != NULL) {
                    count = g_socket_receive_message (gv_fake_camera->priv->discovery_socket,
                                                      &remote_address, &input_vector, 1,
                                                      NULL, NULL, NULL, NULL, NULL);
                    if (count > 0) {
                        if (handle_control_packet (gv_fake_camera, gv_fake_camera->priv->discovery_socket,
                                                   remote_address, input_vector.buffer, count))
                            arv_debug_device ("[GvFakeCamera::thread]"
                                              " Control packet received on discovery socket\n");
                    }
                    g_clear_object (&remote_address);
                }

                if (arv_fake_camera_get_control_channel_privilege (gv_fake_camera->priv->camera) == 0 ||
                    arv_fake_camera_get_acquisition_status (gv_fake_camera->priv->camera) == 0) {
                    if (stream_address != NULL) {
                        g_object_unref (stream_address);
                        stream_address = NULL;
                        g_object_unref (image_buffer);
                        image_buffer = NULL;
                        arv_debug_stream_thread ("[GvFakeCamera::thread] Stop stream");
                    }
                    is_streaming = FALSE;
                }
            }
        } while (!g_atomic_int_get (&gv_fake_camera->priv->cancel) &&
                 g_get_real_time () < next_timestamp_us);

        if (arv_fake_camera_get_control_channel_privilege (gv_fake_camera->priv->camera) != 0 &&
            arv_fake_camera_get_acquisition_status (gv_fake_camera->priv->camera) != 0) {
            if (stream_address == NULL) {
                GInetAddress *inet_address;
                char *inet_address_string;

                stream_address = arv_fake_camera_get_stream_address (gv_fake_camera->priv->camera);
                inet_address = g_inet_socket_address_get_address (G_INET_SOCKET_ADDRESS (stream_address));
                inet_address_string = g_inet_address_to_string (inet_address);
                arv_debug_stream_thread ("[GvFakeCamera::thread] Start stream to %s (%d)",
                                         inet_address_string,
                                         g_inet_socket_address_get_port (G_INET_SOCKET_ADDRESS (stream_address)));
                g_free (inet_address_string);

                payload = arv_fake_camera_get_payload (gv_fake_camera->priv->camera);
                image_buffer = arv_buffer_new (payload, NULL);
            }

            arv_fake_camera_fill_buffer (gv_fake_camera->priv->camera, image_buffer, &gv_packet_size);
            arv_debug_stream_thread ("[GvFakeCamera::thread] Send frame %d", image_buffer->priv->frame_id);

            /* Data leader packet. */
            block_id = 0;
            packet_size = ARV_GV_FAKE_CAMERA_BUFFER_SIZE;
            arv_gvsp_packet_new_data_leader (image_buffer->priv->frame_id,
                                             block_id,
                                             image_buffer->priv->timestamp_ns,
                                             image_buffer->priv->pixel_format,
                                             image_buffer->priv->width, image_buffer->priv->height,
                                             image_buffer->priv->x_offset, image_buffer->priv->y_offset,
                                             packet_buffer, &packet_size);
            g_socket_send_to (gv_fake_camera->priv->gvsp_socket, stream_address,
                              packet_buffer, packet_size, NULL, &error);
            if (error != NULL) {
                arv_warning_stream_thread ("[GvFakeCamera::thread] Failed to send leader for frame %d: %s",
                                           image_buffer->priv->frame_id, error->message);
                g_clear_error (&error);
            }
            block_id++;

            /* Data block packets. */
            offset = 0;
            while (offset < payload) {
                size_t data_size;

                data_size = MIN (gv_packet_size - ARV_GVSP_PACKET_PROTOCOL_OVERHEAD,
                                 payload - offset);

                packet_size = ARV_GV_FAKE_CAMERA_BUFFER_SIZE;
                arv_gvsp_packet_new_data_block (image_buffer->priv->frame_id, block_id,
                                                data_size,
                                                ((char *) image_buffer->priv->data) + offset,
                                                packet_buffer, &packet_size);
                g_socket_send_to (gv_fake_camera->priv->gvsp_socket, stream_address,
                                  packet_buffer, packet_size, NULL, &error);
                if (error != NULL) {
                    arv_debug_stream_thread ("[GvFakeCamera::thread] Failed to send block %d of frame %d: %s",
                                             block_id, image_buffer->priv->frame_id, error->message);
                    g_clear_error (&error);
                }

                offset += data_size;
                block_id++;
            }

            /* Data trailer packet. */
            packet_size = ARV_GV_FAKE_CAMERA_BUFFER_SIZE;
            arv_gvsp_packet_new_data_trailer (image_buffer->priv->frame_id, block_id,
                                              packet_buffer, &packet_size);
            g_socket_send_to (gv_fake_camera->priv->gvsp_socket, stream_address,
                              packet_buffer, packet_size, NULL, &error);
            if (error != NULL) {
                arv_debug_stream_thread ("[GvFakeCamera::thread] Failed to send trailer for frame %d: %s",
                                         image_buffer->priv->frame_id, error->message);
                g_clear_error (&error);
            }

            is_streaming = TRUE;
        }
    } while (!g_atomic_int_get (&gv_fake_camera->priv->cancel));

    if (stream_address != NULL)
        g_object_unref (stream_address);
    if (image_buffer != NULL)
        g_object_unref (image_buffer);

    g_free (packet_buffer);
    g_free (input_vector.buffer);

    return NULL;
}
int main (int argc, char **argv)
{
    ApplicationData data;
    ArvCamera *camera;
    ArvStream *stream;
    int i;

    data.buffer_count = 0;

    /* Mandatory glib type system initialization */
    arv_g_type_init ();

    /* Instantiation of the first available camera */
    camera = arv_camera_new (NULL);

    if (camera != NULL) {
        void (*old_sigint_handler)(int);
        gint payload;

        /* Set region of interest to a 200x200 pixel area */
        arv_camera_set_region (camera, 0, 0, 200, 200);

        /* Set frame rate to 10 Hz */
        arv_camera_set_frame_rate (camera, 10.0);

        /* Retrieve image payload (number of bytes per image) */
        payload = arv_camera_get_payload (camera);

        /* Create a new stream object */
        stream = arv_camera_create_stream (camera, NULL, NULL);
        if (stream != NULL) {
            /* Push 50 buffers into the stream input buffer queue */
            for (i = 0; i < 50; i++)
                arv_stream_push_buffer (stream, arv_buffer_new (payload, NULL));

            /* Start the video stream */
            arv_camera_start_acquisition (camera);

            /* Connect the new-buffer signal */
            g_signal_connect (stream, "new-buffer", G_CALLBACK (new_buffer_cb), &data);
            /* And enable emission of this signal (it's disabled by default for performance reasons) */
            arv_stream_set_emit_signals (stream, TRUE);

            /* Connect the control-lost signal */
            g_signal_connect (arv_camera_get_device (camera), "control-lost",
                              G_CALLBACK (control_lost_cb), NULL);

            /* Install the callback for frame rate display */
            g_timeout_add_seconds (1, periodic_task_cb, &data);

            /* Create a new glib main loop */
            data.main_loop = g_main_loop_new (NULL, FALSE);

            old_sigint_handler = signal (SIGINT, set_cancel);

            /* Run the main loop */
            g_main_loop_run (data.main_loop);

            signal (SIGINT, old_sigint_handler);

            g_main_loop_unref (data.main_loop);

            /* Stop the video stream */
            arv_camera_stop_acquisition (camera);

            g_object_unref (stream);
        } else
            printf ("Can't create stream thread (check if the device is not already used)\n");

        g_object_unref (camera);
    } else
        printf ("No camera found\n");

    return 0;
}
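This example wires up three callbacks that are not shown. A sketch of what they can look like, closely following the pattern of the Aravis example programs; the "new-buffer" and "control-lost" signals and arv_stream_try_pop_buffer() are real Aravis API, while the ApplicationData layout and the global cancel flag are assumptions taken from the snippet above:

/* Hedged sketch of the callbacks the example above assumes. */
static gboolean cancel = FALSE;

static void
set_cancel (int signal)
{
    cancel = TRUE;
}

static void
new_buffer_cb (ArvStream *stream, ApplicationData *data)
{
    ArvBuffer *buffer;

    /* Non-blocking pop: the signal tells us a buffer is ready. */
    buffer = arv_stream_try_pop_buffer (stream);
    if (buffer != NULL) {
        if (buffer->status == ARV_BUFFER_STATUS_SUCCESS)
            data->buffer_count++;
        /* Image processing would happen here; then recycle the buffer. */
        arv_stream_push_buffer (stream, buffer);
    }
}

static void
control_lost_cb (ArvGvDevice *gv_device)
{
    /* The control channel was lost; ask the main loop to quit. */
    printf ("Control lost\n");
    cancel = TRUE;
}

static gboolean
periodic_task_cb (void *abstract_data)
{
    ApplicationData *data = abstract_data;

    /* Called once per second: report and reset the frame counter. */
    printf ("Frame rate = %d Hz\n", data->buffer_count);
    data->buffer_count = 0;

    if (cancel) {
        g_main_loop_quit (data->main_loop);
        return FALSE;
    }
    return TRUE;
}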