Example #1
bool CvCaptureCAM_Aravis::grabFrame()
{
    // remove content of previous frame
    framebuffer = NULL;

    if(stream) {
        ArvBuffer *arv_buffer = NULL;
        int max_tries = 10;
        int tries = 0;
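        // Retry the pop a few times: buffers whose status is not SUCCESS
        // (missing packets, timeout, ...) are requeued so the stream can reuse them.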
        for(; tries < max_tries; tries ++) {
            arv_buffer = arv_stream_timeout_pop_buffer (stream, 200000);
            if (arv_buffer != NULL && arv_buffer_get_status (arv_buffer) != ARV_BUFFER_STATUS_SUCCESS) {
                arv_stream_push_buffer (stream, arv_buffer);
            } else break;
        }
        if(arv_buffer != NULL && tries < max_tries) {
            size_t buffer_size;
            framebuffer = (void*)arv_buffer_get_data (arv_buffer, &buffer_size);

            // retrieve image size properties
            arv_buffer_get_image_region (arv_buffer, &xoffset, &yoffset, &width, &height);

            // retrieve image ID set by the camera
            frameID = arv_buffer_get_frame_id(arv_buffer);

            arv_stream_push_buffer(stream, arv_buffer);
            return true;
        }
    }
    return false;
}
Example #2
bool CvCaptureCAM_Aravis::init_buffers()
{
    if(stream) {
        g_object_unref(stream);
        stream = NULL;
    }
    if( (stream = arv_camera_create_stream(camera, NULL, NULL)) ) {
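        // Tune the GigE Vision stream: automatic socket buffer sizing, packet
        // resend disabled, and packet/frame timeouts given in microseconds.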
        g_object_set(stream,
            "socket-buffer", ARV_GV_STREAM_SOCKET_BUFFER_AUTO,
            "socket-buffer-size", 0, NULL);
        g_object_set(stream,
            "packet-resend", ARV_GV_STREAM_PACKET_RESEND_NEVER, NULL);
        g_object_set(stream,
            "packet-timeout", (unsigned) 40000,
            "frame-retention", (unsigned) 200000, NULL);

        payload = arv_camera_get_payload (camera);

        for (int i = 0; i < num_buffers; i++)
            arv_stream_push_buffer(stream, arv_buffer_new(payload, NULL));

        return true;
    }

    return false;
}
Example #3
	unsigned char* CamGigE::grab(double timeout)
	{
		ArvBuffer *buffer;

		if(timeout < 0)
			timeout = +INFINITY;

		while(timeout > 0)
		{
			// Non-blocking pop, so the timeout below can actually expire.
			buffer = arv_stream_try_pop_buffer(stream);

			if (buffer != NULL) {
				if (buffer->status == ARV_BUFFER_STATUS_SUCCESS) {
					memcpy(rawdata, (unsigned char*) buffer->data, mWidth*mHeight);
					arv_stream_push_buffer(stream, buffer);

					return rawdata;
				}
				// Incomplete frame: requeue the buffer and keep waiting.
				arv_stream_push_buffer(stream, buffer);
			}

			usleep(1000);
			timeout -= 0.001;
		}

		throw "Camera capture timeout";

	}
Example #4
	CamGigE::CamGigE(int id):
			Cam(id)
	{
		mWidth = XSIZE;
		mHeight = YSIZE;

		xshift = 0;
		yshift = 0;
		exposure = 2*1000;
		gain = 300;
		isCapturing = false;
		framerate = 15.0f;

		mId = id;

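		// Initialize the GLib type system (required only with GLib < 2.36).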
		g_type_init ();
		camera = arv_camera_new(NULL);
		if(camera == NULL)
		{
			throw "No camera found";
		}

		arv_camera_set_region(camera, xshift, yshift, mWidth, mHeight);
		arv_camera_set_exposure_time(camera, exposure);
		arv_camera_set_gain(camera, gain);

		arv_camera_set_pixel_format(camera, ARV_PIXEL_FORMAT_MONO_8);
		//arv_camera_set_pixel_format(camera, ARV_PIXEL_FORMAT_BAYER_BG_8);


		width = (int)mWidth;
		height = (int)mHeight;
		arv_camera_get_region (camera, &xshift, &yshift, &width, &height);
		payload = arv_camera_get_payload (camera);

		stream = arv_camera_create_stream (camera, NULL, NULL);
		if(stream == NULL) {
			throw "Cannot create stream";
		}
		g_object_set(stream, "packet-timeout", (unsigned) 20 * 1000,
							 "frame-retention", (unsigned) 100 * 1000,
							 NULL);
		for(int i = 0; i < 4; i++)
			arv_stream_push_buffer(stream, arv_buffer_new(payload, NULL));

		arv_camera_set_acquisition_mode(camera, ARV_ACQUISITION_MODE_CONTINUOUS);
		arv_camera_set_frame_rate(camera, framerate);

		rawdata = new unsigned char[width*height];


	}
Example #5
static void
new_buffer_cb (ArvStream *stream, ApplicationData *data)
{
	ArvBuffer *buffer;

	buffer = arv_stream_try_pop_buffer (stream);
	if (buffer != NULL) {
		if (buffer->status == ARV_BUFFER_STATUS_SUCCESS)
			data->buffer_count++;
		/* Image processing here */
		arv_stream_push_buffer (stream, buffer);
	}
}
Example #6
static GstFlowReturn
gst_aravis_create (GstPushSrc * push_src, GstBuffer ** buffer)
{
    GstAravis *gst_aravis;
    ArvBuffer *arv_buffer;

    gst_aravis = GST_ARAVIS (push_src);

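    /* Requeue failed buffers until a complete one arrives or the pop times out. */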
    do {
        arv_buffer = arv_stream_timeout_pop_buffer (gst_aravis->stream, gst_aravis->buffer_timeout_us);
        if (arv_buffer != NULL && arv_buffer->status != ARV_BUFFER_STATUS_SUCCESS)
            arv_stream_push_buffer (gst_aravis->stream, arv_buffer);
    } while (arv_buffer != NULL && arv_buffer->status != ARV_BUFFER_STATUS_SUCCESS);

    if (arv_buffer == NULL)
        return GST_FLOW_ERROR;

    *buffer = gst_buffer_new ();

    GST_BUFFER_DATA (*buffer) = arv_buffer->data;
    GST_BUFFER_MALLOCDATA (*buffer) = NULL;
    GST_BUFFER_SIZE (*buffer) = gst_aravis->payload;

    if (gst_aravis->timestamp_offset == 0) {
        gst_aravis->timestamp_offset = arv_buffer->timestamp_ns;
        gst_aravis->last_timestamp = arv_buffer->timestamp_ns;
    }

    GST_BUFFER_TIMESTAMP (*buffer) = arv_buffer->timestamp_ns - gst_aravis->timestamp_offset;
    GST_BUFFER_DURATION (*buffer) = arv_buffer->timestamp_ns - gst_aravis->last_timestamp;

    gst_aravis->last_timestamp = arv_buffer->timestamp_ns;

    arv_stream_push_buffer (gst_aravis->stream, arv_buffer);

    gst_buffer_set_caps (*buffer, gst_aravis->fixed_caps);

    return GST_FLOW_OK;
}
Example #7
bool CvCaptureCAM_Aravis::grabFrame()
{
    ArvBuffer *arv_buffer = NULL;
    int max_tries = 10;
    int tries = 0;
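    // Retry the timed pop a few times, requeueing incomplete buffers.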
    for(; tries < max_tries; tries ++) {
        arv_buffer = arv_stream_timeout_pop_buffer (stream, 200000);
        if (arv_buffer != NULL && arv_buffer_get_status (arv_buffer) != ARV_BUFFER_STATUS_SUCCESS) {
            arv_stream_push_buffer (stream, arv_buffer);
        } else break;
    }

    if (arv_buffer == NULL || tries == max_tries)
        return false;

    size_t buffer_size;
    framebuffer = (void*)arv_buffer_get_data (arv_buffer, &buffer_size);

    arv_buffer_get_image_region (arv_buffer, NULL, NULL, &width, &height);

    arv_stream_push_buffer(stream, arv_buffer);
    return true;
}
Example #8
static void aravis_stream_callback( aravis_handle_t handle, ArvStreamCallbackType type, ArvBuffer *buffer)
{
	if (type == ARV_STREAM_CALLBACK_TYPE_BUFFER_DONE){
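		// Stream callback: when a buffer is done, wrap it in a unicap buffer,
		// notify the client, then requeue it on the stream.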
		unicap_data_buffer_t data_buffer;
		unicap_copy_format (&data_buffer.format, &handle->current_format);
		data_buffer.buffer_size = buffer->size;
		data_buffer.data = buffer->data;
		data_buffer.type = UNICAP_BUFFER_TYPE_SYSTEM;
		data_buffer.fill_time.tv_sec =  buffer->timestamp_ns / 1000000000ULL;
		data_buffer.fill_time.tv_usec = (buffer->timestamp_ns % 1000000000ULL) / 1000ULL;
		handle->event_callback (handle->unicap_handle, UNICAP_EVENT_NEW_FRAME, &data_buffer);
		arv_stream_push_buffer (handle->stream, buffer);
	}
}
Example #9
static unicap_status_t aravis_capture_start( aravis_handle_t handle )
{
	guint payload;
	int i;
	
	handle->stream = arv_camera_create_stream( handle->camera, aravis_stream_callback, handle);
	if (handle->stream == NULL)
		return STATUS_FAILURE;

	arv_camera_set_acquisition_mode (handle->camera, ARV_ACQUISITION_MODE_CONTINUOUS);
	arv_camera_start_acquisition (handle->camera);

	payload = arv_camera_get_payload (handle->camera);
	for (i=0; i < 8; i++)
		arv_stream_push_buffer (handle->stream, arv_buffer_new (payload, NULL));

	return STATUS_SUCCESS;
}
Example #10
ArvGvStream *CreateStream(void)
{
	gboolean 		bAutoBuffer = FALSE;
	gboolean 		bPacketResend = TRUE;
	unsigned int 	timeoutPacket = 40; // milliseconds
	unsigned int 	timeoutFrameRetention = 200;

	
	ArvGvStream *pStream = (ArvGvStream *)arv_device_create_stream (global.pDevice, NULL, NULL);
	if (pStream)
	{
		ArvBuffer	*pBuffer;
		gint 		 nbytesPayload;


		if (!ARV_IS_GV_STREAM (pStream))
			ROS_WARN("Stream is not a GV_STREAM");

		if (bAutoBuffer)
			g_object_set (pStream,
					      "socket-buffer",
						  ARV_GV_STREAM_SOCKET_BUFFER_AUTO,
						  "socket-buffer-size", 0,
						  NULL);
		if (!bPacketResend)
			g_object_set (pStream,
					      "packet-resend", ARV_GV_STREAM_PACKET_RESEND_NEVER,
						  NULL);
		g_object_set (pStream,
				          "packet-timeout",
						  (unsigned) timeoutPacket * 1000,
						  "frame-retention", (unsigned) timeoutFrameRetention * 1000,
					  NULL);
	
		// Load up some buffers.
		nbytesPayload = arv_camera_get_payload (global.pCamera);
		for (int i=0; i<50; i++)
		{
			pBuffer = arv_buffer_new (nbytesPayload, NULL);
			arv_stream_push_buffer ((ArvStream *)pStream, pBuffer);
		}
	}
	return pStream;
} // CreateStream()
Example #11
int main(int argc, char *argv[])
{
    ArvDevice *device;
    ArvStream *stream;
    ArvCamera *camera;
    ArvGcFeatureNode *feature;
    guint64 n_completed_buffers;
    guint64 n_failures;
    guint64 n_underruns;
    GOptionContext *context;
    GError *error = NULL;
    void (*old_sigint_handler)(int);
    int i, payload;

    arv_g_thread_init (NULL);
    arv_g_type_init ();

    context = g_option_context_new (NULL);
    g_option_context_set_summary (context, "Test of heartbeat robustness while continuously changing a feature.");
    g_option_context_add_main_entries (context, arv_option_entries, NULL);

    if (!g_option_context_parse (context, &argc, &argv, &error)) {
	    g_option_context_free (context);
	    g_print ("Option parsing failed: %s\n", error->message);
	    g_error_free (error);
	    return EXIT_FAILURE;
    }

    g_option_context_free (context);

    arv_debug_enable (arv_option_debug_domains);

    camera = arv_camera_new (arv_option_camera_name);
    if (!ARV_IS_CAMERA (camera)) {
	    printf ("Device not found\n");
	    return EXIT_FAILURE;
    }

    device = arv_camera_get_device (camera);

    stream = arv_camera_create_stream (camera, NULL, NULL);
    if (!ARV_IS_STREAM (stream)) {
	    printf ("Invalid device\n");
    } else {
	    payload = arv_camera_get_payload (camera);

	    if (ARV_IS_GV_STREAM (stream)) {
		    g_object_set (stream,
				  //"socket-buffer", ARV_GV_STREAM_SOCKET_BUFFER_AUTO,
				  "socket-buffer", ARV_GV_STREAM_SOCKET_BUFFER_FIXED,
				  "socket-buffer-size", payload*6,
				  "packet-timeout", 1000 * 1000,
				  "frame-retention", 100 * 1000,
				  "packet-resend", ARV_GV_STREAM_PACKET_RESEND_ALWAYS,
				  NULL);
	    }

	    for (i = 0; i < 100; i++)
		    arv_stream_push_buffer(stream, arv_buffer_new(payload, NULL));

	    arv_camera_set_acquisition_mode(camera, ARV_ACQUISITION_MODE_CONTINUOUS);

	    feature = ARV_GC_FEATURE_NODE (arv_device_get_feature (device, arv_option_feature_name));

	    arv_camera_start_acquisition (camera);

	    old_sigint_handler = signal (SIGINT, set_cancel);

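	    /* Pop/requeue buffers while periodically toggling the target feature
	     * between its min and max values, to stress the control channel. */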
	    while (!cancel) {
		    ArvBuffer *buffer = arv_stream_timeout_pop_buffer(stream, 2000000);
		    if (buffer) {
			    usleep(10);
			    arv_stream_push_buffer (stream, buffer);
		    }

		    if (!(++i%5)) {
			    char *value;

			    if ((i/100) % 2 == 0)
				    value = g_strdup_printf ("%d", arv_option_min);
			    else
				    value = g_strdup_printf ("%d", arv_option_max);

			    fprintf (stderr, "Setting %s from %s to %s\n",
				     arv_option_feature_name,
				     arv_gc_feature_node_get_value_as_string (feature, NULL),
				     value);
			    arv_gc_feature_node_set_value_from_string (feature, value, NULL);

			    g_free (value);
		    }
	    }

	    signal (SIGINT, old_sigint_handler);

	    arv_stream_get_statistics (stream, &n_completed_buffers, &n_failures, &n_underruns);

	    printf ("\nCompleted buffers = %llu\n", (unsigned long long) n_completed_buffers);
	    printf ("Failures          = %llu\n", (unsigned long long) n_failures);
	    printf ("Underruns         = %llu\n", (unsigned long long) n_underruns);

	    arv_camera_stop_acquisition (camera);
    }

    g_object_unref (camera);

    return 0;
}
Example #12
static GstFlowReturn
gst_aravis_create (GstPushSrc * push_src, GstBuffer ** buffer)
{
	GstAravis *gst_aravis;
	ArvBuffer *arv_buffer;
	int arv_row_stride;
	int width, height;
	char *buffer_data;
	size_t buffer_size;
	guint64 timestamp_ns;

	gst_aravis = GST_ARAVIS (push_src);

	do {
		arv_buffer = arv_stream_timeout_pop_buffer (gst_aravis->stream, gst_aravis->buffer_timeout_us);
		if (arv_buffer != NULL && arv_buffer_get_status (arv_buffer) != ARV_BUFFER_STATUS_SUCCESS)
			arv_stream_push_buffer (gst_aravis->stream, arv_buffer);
	} while (arv_buffer != NULL && arv_buffer_get_status (arv_buffer) != ARV_BUFFER_STATUS_SUCCESS);

	if (arv_buffer == NULL)
		return GST_FLOW_ERROR;

	*buffer = gst_buffer_new ();

	buffer_data = (char *) arv_buffer_get_data (arv_buffer, &buffer_size);
	arv_buffer_get_image_region (arv_buffer, NULL, NULL, &width, &height);
	arv_row_stride = width * ARV_PIXEL_FORMAT_BIT_PER_PIXEL (arv_buffer_get_image_pixel_format (arv_buffer)) / 8;
	timestamp_ns = arv_buffer_get_timestamp (arv_buffer);

	/* Gstreamer requires row stride to be a multiple of 4 */
	if ((arv_row_stride & 0x3) != 0) {
		int gst_row_stride;
		size_t size;
		void *data;
		int i;

		gst_row_stride = (arv_row_stride & ~(0x3)) + 4;

		size = height * gst_row_stride;
		data = g_malloc (size);

		for (i = 0; i < height; i++)
			memcpy (((char *) data) + i * gst_row_stride, buffer_data + i * arv_row_stride, arv_row_stride);

		GST_BUFFER_DATA (*buffer) = data;
		GST_BUFFER_MALLOCDATA (*buffer) = data;
		GST_BUFFER_SIZE (*buffer) = size;
	} else {
		GST_BUFFER_DATA (*buffer) = buffer_data;
		GST_BUFFER_MALLOCDATA (*buffer) = NULL;
		GST_BUFFER_SIZE (*buffer) = buffer_size;
	}

	if (!gst_base_src_get_do_timestamp(GST_BASE_SRC(push_src))) {
		if (gst_aravis->timestamp_offset == 0) {
			gst_aravis->timestamp_offset = timestamp_ns;
			gst_aravis->last_timestamp = timestamp_ns;
		}

		GST_BUFFER_TIMESTAMP (*buffer) = timestamp_ns - gst_aravis->timestamp_offset;
		GST_BUFFER_DURATION (*buffer) = timestamp_ns - gst_aravis->last_timestamp;

		gst_aravis->last_timestamp = timestamp_ns;
	}

	arv_stream_push_buffer (gst_aravis->stream, arv_buffer);

	gst_buffer_set_caps (*buffer, gst_aravis->fixed_caps);

	return GST_FLOW_OK;
}
Example #13
static gboolean
gst_aravis_set_caps (GstBaseSrc *src, GstCaps *caps)
{
	GstAravis* gst_aravis = GST_ARAVIS(src);
	GstStructure *structure;
	ArvPixelFormat pixel_format;
	int height, width;
	int bpp, depth;
	const GValue *frame_rate;
	const char *caps_string;
	unsigned int i;
	guint32 fourcc;

	GST_LOG_OBJECT (gst_aravis, "Requested caps = %" GST_PTR_FORMAT, caps);

	arv_camera_stop_acquisition (gst_aravis->camera);

	if (gst_aravis->stream != NULL)
		g_object_unref (gst_aravis->stream);

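	/* Extract the requested geometry and format from the first caps structure. */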
	structure = gst_caps_get_structure (caps, 0);

	gst_structure_get_int (structure, "width", &width);
	gst_structure_get_int (structure, "height", &height);
	frame_rate = gst_structure_get_value (structure, "framerate");
	gst_structure_get_int (structure, "bpp", &bpp);
	gst_structure_get_int (structure, "depth", &depth);

	if (gst_structure_get_field_type (structure, "format") == G_TYPE_STRING) {
		const char *string;

		string = gst_structure_get_string (structure, "format");
		fourcc = GST_STR_FOURCC (string);
	} else if (gst_structure_get_field_type (structure, "format") == GST_TYPE_FOURCC) {
		gst_structure_get_fourcc (structure, "format", &fourcc);
	} else
		fourcc = 0;

	pixel_format = arv_pixel_format_from_gst_0_10_caps (gst_structure_get_name (structure), bpp, depth, fourcc);

	arv_camera_set_region (gst_aravis->camera, gst_aravis->offset_x, gst_aravis->offset_y, width, height);
	arv_camera_set_binning (gst_aravis->camera, gst_aravis->h_binning, gst_aravis->v_binning);
	arv_camera_set_pixel_format (gst_aravis->camera, pixel_format);

	if (frame_rate != NULL) {
		double dbl_frame_rate;

		dbl_frame_rate = (double) gst_value_get_fraction_numerator (frame_rate) /
			(double) gst_value_get_fraction_denominator (frame_rate);

		GST_DEBUG_OBJECT (gst_aravis, "Frame rate = %g Hz", dbl_frame_rate);
		arv_camera_set_frame_rate (gst_aravis->camera, dbl_frame_rate);

		if (dbl_frame_rate > 0.0)
			gst_aravis->buffer_timeout_us = MAX (GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT,
							     3e6 / dbl_frame_rate);
		else
			gst_aravis->buffer_timeout_us = GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT;
	} else
		gst_aravis->buffer_timeout_us = GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT;

	GST_DEBUG_OBJECT (gst_aravis, "Buffer timeout = %" G_GUINT64_FORMAT " µs", gst_aravis->buffer_timeout_us);

	GST_DEBUG_OBJECT (gst_aravis, "Actual frame rate = %g Hz", arv_camera_get_frame_rate (gst_aravis->camera));

	if(gst_aravis->gain_auto) {
		arv_camera_set_gain_auto (gst_aravis->camera, ARV_AUTO_CONTINUOUS);
		GST_DEBUG_OBJECT (gst_aravis, "Auto Gain = continuous");
	} else {
		if (gst_aravis->gain >= 0) {
			GST_DEBUG_OBJECT (gst_aravis, "Gain = %g", gst_aravis->gain);
			arv_camera_set_gain_auto (gst_aravis->camera, ARV_AUTO_OFF);
			arv_camera_set_gain (gst_aravis->camera, gst_aravis->gain);
		}
		GST_DEBUG_OBJECT (gst_aravis, "Actual gain = %g", arv_camera_get_gain (gst_aravis->camera));
	}

	if(gst_aravis->exposure_auto) {
		arv_camera_set_exposure_time_auto (gst_aravis->camera, ARV_AUTO_CONTINUOUS);
		GST_DEBUG_OBJECT (gst_aravis, "Auto Exposure = continuous");
	} else {
		if (gst_aravis->exposure_time_us > 0.0) {
			GST_DEBUG_OBJECT (gst_aravis, "Exposure = %g µs", gst_aravis->exposure_time_us);
			arv_camera_set_exposure_time_auto (gst_aravis->camera, ARV_AUTO_OFF);
			arv_camera_set_exposure_time (gst_aravis->camera, gst_aravis->exposure_time_us);
		}
		GST_DEBUG_OBJECT (gst_aravis, "Actual exposure = %g µs", arv_camera_get_exposure_time (gst_aravis->camera));
	}

	if (gst_aravis->fixed_caps != NULL)
		gst_caps_unref (gst_aravis->fixed_caps);

	caps_string = arv_pixel_format_to_gst_0_10_caps_string (pixel_format);
	if (caps_string != NULL) {
		GstStructure *structure;
		GstCaps *caps;

		caps = gst_caps_new_empty ();
		structure = gst_structure_from_string (caps_string, NULL);
		gst_structure_set (structure,
				   "width", G_TYPE_INT, width,
				   "height", G_TYPE_INT, height,
				   NULL);

		if (frame_rate != NULL)
			gst_structure_set_value (structure, "framerate", frame_rate);

		gst_caps_append_structure (caps, structure);

		gst_aravis->fixed_caps = caps;
	} else
		gst_aravis->fixed_caps = NULL;

	gst_aravis->payload = arv_camera_get_payload (gst_aravis->camera);
	gst_aravis->stream = arv_camera_create_stream (gst_aravis->camera, NULL, NULL);

	if (ARV_IS_GV_STREAM (gst_aravis->stream) && gst_aravis->packet_resend)
		g_object_set (gst_aravis->stream, "packet-resend", ARV_GV_STREAM_PACKET_RESEND_ALWAYS, NULL);
	else
		g_object_set (gst_aravis->stream, "packet-resend", ARV_GV_STREAM_PACKET_RESEND_NEVER, NULL);

	for (i = 0; i < GST_ARAVIS_N_BUFFERS; i++)
		arv_stream_push_buffer (gst_aravis->stream,
					arv_buffer_new (gst_aravis->payload, NULL));

	GST_LOG_OBJECT (gst_aravis, "Start acquisition");
	arv_camera_start_acquisition (gst_aravis->camera);

	gst_aravis->timestamp_offset = 0;
	gst_aravis->last_timestamp = 0;

	return TRUE;
}
Example #14
static GstFlowReturn
gst_aravis_create (GstPushSrc * push_src, GstBuffer ** buffer)
{
	GstAravis *gst_aravis;
	ArvBuffer *arv_buffer;
	int arv_row_stride;

	gst_aravis = GST_ARAVIS (push_src);

	do {
		arv_buffer = arv_stream_timeout_pop_buffer (gst_aravis->stream, gst_aravis->buffer_timeout_us);
		if (arv_buffer != NULL && arv_buffer->status != ARV_BUFFER_STATUS_SUCCESS)
			arv_stream_push_buffer (gst_aravis->stream, arv_buffer);
	} while (arv_buffer != NULL && arv_buffer->status != ARV_BUFFER_STATUS_SUCCESS);

	if (arv_buffer == NULL)
		return GST_FLOW_ERROR;

	arv_row_stride = arv_buffer->width * ARV_PIXEL_FORMAT_BIT_PER_PIXEL (arv_buffer->pixel_format) / 8;

	/* Gstreamer requires row stride to be a multiple of 4 */
	if ((arv_row_stride & 0x3) != 0) {
		int gst_row_stride;
		size_t size;
		void *data;
		int i;

		gst_row_stride = (arv_row_stride & ~(0x3)) + 4;

		size = arv_buffer->height * gst_row_stride;
		data = g_malloc (size);

		for (i = 0; i < arv_buffer->height; i++)
			memcpy (((char *) data) + i * gst_row_stride, ((char *) arv_buffer->data) + i * arv_row_stride, arv_row_stride);

		*buffer = gst_buffer_new_wrapped (data, size);
	} else {
		*buffer = gst_buffer_new_wrapped_full (0,
			arv_buffer->data,
			arv_buffer->size,
			0,
			arv_buffer->size,
			NULL,
			NULL);
	}

	if (!gst_base_src_get_do_timestamp(GST_BASE_SRC(push_src))) {
		if (gst_aravis->timestamp_offset == 0) {
			gst_aravis->timestamp_offset = arv_buffer->timestamp_ns;
			gst_aravis->last_timestamp = arv_buffer->timestamp_ns;
		}

		GST_BUFFER_PTS (*buffer) = arv_buffer->timestamp_ns - gst_aravis->timestamp_offset;
		GST_BUFFER_DURATION (*buffer) = arv_buffer->timestamp_ns - gst_aravis->last_timestamp;

		gst_aravis->last_timestamp = arv_buffer->timestamp_ns;
	}

	arv_stream_push_buffer (gst_aravis->stream, arv_buffer);

	return GST_FLOW_OK;
}
Example #15
int main (int argc, char **argv)
{
	
	ArvCamera * camera;
	ArvStream *stream;
	ArvBuffer *buffer;
	GOptionContext *context;
	GError *error = NULL;
	char memory_buffer[100000];
	int i;

	arv_g_thread_init (NULL);
	arv_g_type_init ();

	context = g_option_context_new (NULL);
	g_option_context_add_main_entries (context, arv_option_entries, NULL);

	if (!g_option_context_parse (context, &argc, &argv, &error)) {
		g_option_context_free (context);
		g_print ("Option parsing failed: %s\n", error->message);
		g_error_free (error);
		return EXIT_FAILURE;
	}

	g_option_context_free (context);
	if (arv_option_max_frames < 0)
		arv_option_max_errors_before_abort = -1;
	
	save_buffer_fn = GetSaveBufferFn(arv_option_save_type);

	arv_debug_enable (arv_option_debug_domains);

	if (arv_option_camera_name == NULL)
		g_print ("Looking for the first available camera\n");
	else
		g_print ("Looking for camera '%s'\n", arv_option_camera_name);

	camera = arv_camera_new (arv_option_camera_name);
	
	int errors = 0;
	if (camera == NULL)
	{
		g_print("No device found");
		return 1;
	}

	guint payload_size = arv_camera_get_payload(camera);
	g_print ("payload size  = %d (0x%x)\n", payload_size, payload_size);
	

	stream = arv_camera_create_stream (camera, NULL, NULL);
	if (arv_option_auto_buffer)
	{
		g_object_set (stream,"socket-buffer", ARV_GV_STREAM_SOCKET_BUFFER_AUTO,"socket-buffer-size", 0,NULL);
	}
	
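	// Pre-queue 30 frame buffers on the stream.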
	for (i = 0; i < 30; i++)
	{
		arv_stream_push_buffer (stream, arv_buffer_new (payload_size, NULL));
	}
	
	arv_camera_stop_acquisition(camera);

	// set the bit depth
	ArvDevice * device = arv_camera_get_device(camera);
	ArvGcNode * feature = arv_device_get_feature(device, "PixelFormat");
	const char * pix_format = "Mono8";
	if (arv_option_pixel_format == 14)
		pix_format = "Mono14";
	arv_gc_feature_node_set_value_from_string(ARV_GC_FEATURE_NODE(feature), pix_format, NULL);
	if (arv_option_pixel_format == 14)
	{
		feature = arv_device_get_feature(device, "CMOSBitDepth");
		arv_gc_feature_node_set_value_from_string(ARV_GC_FEATURE_NODE(feature), "bit14bit", NULL);
	}


	signal (SIGINT, set_cancel);
	signal (SIGQUIT, set_cancel);

	int captured_frames = 0;
	guint64 timeout=1000000;
	#define _CAN_STOP (arv_option_max_frames > 0 && captured_frames >= arv_option_max_frames)
	arv_camera_start_acquisition(camera);
	do {
		g_usleep (100000);
		do {
			buffer = arv_stream_timeout_pop_buffer (stream, timeout);
			if (buffer == NULL) break;
			ArvBufferStatus status = arv_buffer_get_status(buffer);
			fprintf(stderr, "Status is %d\n", status);
			if (status == ARV_BUFFER_STATUS_SUCCESS)
			{
				if (timeout > 100000) timeout -= 1000;
				errors = 0;
				if (save_buffer_fn != NULL)
				{
					struct timespec timestamp;
					clock_gettime(CLOCK_REALTIME, &timestamp);
					char filename[BUFSIZ];
					if (strcmp(arv_option_save_prefix,"") != 0)
					{
						sprintf(filename, "%s/%s%d.%s", arv_option_save_dir,arv_option_save_prefix, captured_frames, arv_option_save_type);
					}
					else
					{
						sprintf(filename, "%s/%d.%03ld.%s", arv_option_save_dir, (int)timestamp.tv_sec, (long)(timestamp.tv_nsec/1.0e6), arv_option_save_type);
					}
					if ((*save_buffer_fn)(buffer, filename) == false)
					{
						g_print("Couldn't save frame %d to %s\n", captured_frames, filename);
						set_cancel(SIGQUIT);
					}
					else
						g_print("Saved frame %d to %s\n", captured_frames, filename);
					char latest[BUFSIZ];
					snprintf(latest, sizeof(latest), "latest.%s", arv_option_save_type);
					unlink(latest);
					symlink(filename, latest);
				}
				captured_frames++;
				g_usleep(arv_option_sample_period);
			}
			else 
			{
				if (timeout < 10000000) timeout+=1000;
				fprintf(stderr, "%d errors out of %d allowed\n", errors, arv_option_max_errors_before_abort);
				arv_camera_stop_acquisition(camera);
				if (++errors > arv_option_max_errors_before_abort && arv_option_max_errors_before_abort >= 0)
				{
					set_cancel(SIGQUIT);
				}
				else
				{
					arv_camera_start_acquisition(camera);
				}
			}
			arv_stream_push_buffer (stream, buffer);
		} while (!cancel && buffer != NULL && !_CAN_STOP);
	} while (!cancel && !_CAN_STOP);
	arv_camera_stop_acquisition(camera);


	guint64 n_processed_buffers; guint64 n_failures; guint64 n_underruns;
	arv_stream_get_statistics (stream, &n_processed_buffers, &n_failures, &n_underruns);
	g_print ("Processed buffers = %llu\n", (unsigned long long) n_processed_buffers);
	g_print ("Failures          = %llu\n", (unsigned long long) n_failures);
	g_print ("Underruns         = %llu\n", (unsigned long long) n_underruns);


	g_object_unref (stream);
	g_object_unref (camera);
	
	
	return (errors > 0);
}
Example #16
static void NewBuffer_callback (ArvStream *pStream, ApplicationData *pApplicationdata)
{
	static uint64_t  cm = 0L;	// Camera time prev
	uint64_t  		 cn = 0L;	// Camera time now

#ifdef TUNING			
	static uint64_t  rm = 0L;	// ROS time prev
#endif
	uint64_t  		 rn = 0L;	// ROS time now

	static uint64_t	 tm = 0L;	// Calculated image time prev
	uint64_t		 tn = 0L;	// Calculated image time now
		
	static int64_t   em = 0L;	// Error prev.
	int64_t  		 en = 0L;	// Error now between calculated image time and ROS time.
	int64_t  		 de = 0L;	// derivative.
	int64_t  		 ie = 0L;	// integral.
	int64_t			 u = 0L;	// Output of controller.
	
	int64_t			 kp1 = 0L;		// Fractional gains in integer form.
	int64_t			 kp2 = 1024L;
	int64_t			 kd1 = 0L;
	int64_t			 kd2 = 1024L;
	int64_t			 ki1 = -1L;		// A gentle pull toward zero.
	int64_t			 ki2 = 1024L;

	static uint32_t	 iFrame = 0;	// Frame counter.
    
	ArvBuffer		*pBuffer;

	
#ifdef TUNING			
	std_msgs::Int64  msgInt64;
	int 			 kp = 0;
	int 			 kd = 0;
	int 			 ki = 0;
    
	if (global.phNode->hasParam(ros::this_node::getName()+"/kp"))
	{
		global.phNode->getParam(ros::this_node::getName()+"/kp", kp);
		kp1 = kp;
	}
	
	if (global.phNode->hasParam(ros::this_node::getName()+"/kd"))
	{
		global.phNode->getParam(ros::this_node::getName()+"/kd", kd);
		kd1 = kd;
	}
	
	if (global.phNode->hasParam(ros::this_node::getName()+"/ki"))
	{
		global.phNode->getParam(ros::this_node::getName()+"/ki", ki);
		ki1 = ki;
	}
#endif
	
    pBuffer = arv_stream_try_pop_buffer (pStream);
    if (pBuffer != NULL) 
    {
        if (arv_buffer_get_status(pBuffer) == ARV_BUFFER_STATUS_SUCCESS) 
        {
			sensor_msgs::Image msg;
			pApplicationdata->nBuffers++;
			size_t currSize = arv_buffer_get_image_width(pBuffer) * arv_buffer_get_image_height(pBuffer) * global.nBytesPixel;
			std::vector<uint8_t> this_data(currSize);
			memcpy(&this_data[0], arv_buffer_get_data(pBuffer, &currSize), currSize);


			// Camera/ROS Timestamp coordination.
			cn				= (uint64_t)arv_buffer_get_timestamp(pBuffer);				// Camera now
			rn	 			= ros::Time::now().toNSec();					// ROS now
			
			if (iFrame < 10)
			{
				cm = cn;
				tm  = rn;
			}
			
			// Control the error between the computed image timestamp and the ROS timestamp.
			en = (int64_t)tm + (int64_t)cn - (int64_t)cm - (int64_t)rn; // i.e. tn-rn, but calced from prior values.
			de = en-em;
			ie += en;
			u = kp1*(en/kp2) + ki1*(ie/ki2) + kd1*(de/kd2);  // kp<0, ki<0, kd>0
			
			// Compute the new timestamp.
			tn = (uint64_t)((int64_t)tm + (int64_t)cn-(int64_t)cm + u);

#ifdef TUNING			
			ROS_WARN("en=%16ld, ie=%16ld, de=%16ld, u=%16ld + %16ld + %16ld = %16ld", en, ie, de, kp1*(en/kp2), ki1*(ie/ki2), kd1*(de/kd2), u);
			ROS_WARN("cn=%16lu, rn=%16lu, cn-cm=%8ld, rn-rm=%8ld, tn-tm=%8ld, tn-rn=%ld", cn, rn, cn-cm, rn-rm, (int64_t)tn-(int64_t)tm, tn-rn);
			msgInt64.data = tn-rn; //cn-cm+tn-tm; //
			global.ppubInt64->publish(msgInt64);
			rm = rn;
#endif
			
			// Save prior values.
			cm = cn;
			tm = tn;
			em = en;
			
			// Construct the image message.
			msg.header.stamp.fromNSec(tn);
			msg.header.seq = arv_buffer_get_frame_id(pBuffer);
			msg.header.frame_id = global.config.frame_id;
			msg.width = global.widthRoi;
			msg.height = global.heightRoi;
			msg.encoding = global.pszPixelformat;
			msg.step = msg.width * global.nBytesPixel;
			msg.data = this_data;

			// get current CameraInfo data
			global.camerainfo = global.pCameraInfoManager->getCameraInfo();
			global.camerainfo.header.stamp = msg.header.stamp;
			global.camerainfo.header.seq = msg.header.seq;
			global.camerainfo.header.frame_id = msg.header.frame_id;
			global.camerainfo.width = global.widthRoi;
			global.camerainfo.height = global.heightRoi;

			global.publisher.publish(msg, global.camerainfo);
				
        }
        else
            ROS_WARN ("Frame error: %s", szBufferStatusFromInt[arv_buffer_get_status(pBuffer)]);

        arv_stream_push_buffer (pStream, pBuffer);
        iFrame++;
    }
} // NewBuffer_callback()
Example #17
static gboolean
gst_aravis_set_caps (GstBaseSrc *src, GstCaps *caps)
{
    GstAravis* gst_aravis = GST_ARAVIS(src);
    GstStructure *structure;
    ArvPixelFormat pixel_format;
    int height, width;
    int bpp, depth;
    const GValue *frame_rate;
    const char *caps_string;
    unsigned int i;
    guint32 fourcc;

    GST_LOG_OBJECT (gst_aravis, "Requested caps = %" GST_PTR_FORMAT, caps);

    arv_camera_stop_acquisition (gst_aravis->camera);

    if (gst_aravis->stream != NULL)
        g_object_unref (gst_aravis->stream);

    structure = gst_caps_get_structure (caps, 0);

    gst_structure_get_int (structure, "width", &width);
    gst_structure_get_int (structure, "height", &height);
    frame_rate = gst_structure_get_value (structure, "framerate");
    gst_structure_get_fourcc (structure, "format", &fourcc);
    gst_structure_get_int (structure, "bpp", &bpp);
    gst_structure_get_int (structure, "depth", &depth);

    pixel_format = arv_pixel_format_from_gst_caps (gst_structure_get_name (structure), bpp, depth, fourcc);

    arv_camera_set_region (gst_aravis->camera, 0, 0, width, height);
    arv_camera_set_binning (gst_aravis->camera, gst_aravis->h_binning, gst_aravis->v_binning);
    arv_camera_set_pixel_format (gst_aravis->camera, pixel_format);

    if (frame_rate != NULL) {
        double dbl_frame_rate;

        dbl_frame_rate = (double) gst_value_get_fraction_numerator (frame_rate) /
                         (double) gst_value_get_fraction_denominator (frame_rate);

        GST_DEBUG_OBJECT (gst_aravis, "Frame rate = %g Hz", dbl_frame_rate);
        arv_camera_set_frame_rate (gst_aravis->camera, dbl_frame_rate);

        if (dbl_frame_rate > 0.0)
            gst_aravis->buffer_timeout_us = MAX (GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT,
                                                 3e6 / dbl_frame_rate);
        else
            gst_aravis->buffer_timeout_us = GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT;
    } else
        gst_aravis->buffer_timeout_us = GST_ARAVIS_BUFFER_TIMEOUT_DEFAULT;

    GST_DEBUG_OBJECT (gst_aravis, "Buffer timeout = %" G_GUINT64_FORMAT " µs", gst_aravis->buffer_timeout_us);

    GST_DEBUG_OBJECT (gst_aravis, "Actual frame rate = %g Hz", arv_camera_get_frame_rate (gst_aravis->camera));

    GST_DEBUG_OBJECT (gst_aravis, "Gain       = %d", gst_aravis->gain);
    arv_camera_set_gain (gst_aravis->camera, gst_aravis->gain);
    GST_DEBUG_OBJECT (gst_aravis, "Actual gain       = %d", arv_camera_get_gain (gst_aravis->camera));

    GST_DEBUG_OBJECT (gst_aravis, "Exposure   = %g µs", gst_aravis->exposure_time_us);
    arv_camera_set_exposure_time (gst_aravis->camera, gst_aravis->exposure_time_us);
    GST_DEBUG_OBJECT (gst_aravis, "Actual exposure   = %g µs", arv_camera_get_exposure_time (gst_aravis->camera));

    if (gst_aravis->fixed_caps != NULL)
        gst_caps_unref (gst_aravis->fixed_caps);

    caps_string = arv_pixel_format_to_gst_caps_string (pixel_format);
    if (caps_string != NULL) {
        GstStructure *structure;
        GstCaps *caps;

        caps = gst_caps_new_empty ();
        structure = gst_structure_from_string (caps_string, NULL);
        gst_structure_set (structure,
                           "width", G_TYPE_INT, width,
                           "height", G_TYPE_INT, height,
                           NULL);

        if (frame_rate != NULL)
            gst_structure_set_value (structure, "framerate", frame_rate);

        gst_caps_append_structure (caps, structure);

        gst_aravis->fixed_caps = caps;
    } else
        gst_aravis->fixed_caps = NULL;

    gst_aravis->payload = arv_camera_get_payload (gst_aravis->camera);
    gst_aravis->stream = arv_camera_create_stream (gst_aravis->camera, NULL, NULL);

    for (i = 0; i < GST_ARAVIS_N_BUFFERS; i++)
        arv_stream_push_buffer (gst_aravis->stream,
                                arv_buffer_new (gst_aravis->payload, NULL));

    GST_LOG_OBJECT (gst_aravis, "Start acquisition");
    arv_camera_start_acquisition (gst_aravis->camera);

    gst_aravis->timestamp_offset = 0;
    gst_aravis->last_timestamp = 0;

    return TRUE;
}
Example #18
    bool CameraGigeAravis::grabImage(Frame &newFrame){

        ArvBuffer *arv_buffer;
        //exp = arv_camera_get_exposure_time(camera);

        arv_buffer = arv_stream_timeout_pop_buffer(stream,2000000); //us
        char *buffer_data;
        size_t buffer_size;

        if(arv_buffer == NULL){

            throw runtime_error("arv_buffer is NULL");

        }else{

            try{

                if(arv_buffer_get_status(arv_buffer) == ARV_BUFFER_STATUS_SUCCESS){

                    //BOOST_LOG_SEV(logger, normal) << "Success to grab a frame.";

                    buffer_data = (char *) arv_buffer_get_data (arv_buffer, &buffer_size);

                    //Timestamping.
                    //string acquisitionDate = TimeDate::localDateTime(microsec_clock::universal_time(),"%Y:%m:%d:%H:%M:%S");
                    //BOOST_LOG_SEV(logger, normal) << "Date : " << acquisitionDate;
                    boost::posix_time::ptime time = boost::posix_time::microsec_clock::universal_time();
                    string acquisitionDate = to_iso_extended_string(time);
                    //BOOST_LOG_SEV(logger, normal) << "Date : " << acqDateInMicrosec;

                    Mat image;
                    CamPixFmt imgDepth = MONO8;
                    int saturateVal = 0;

                    if(pixFormat == ARV_PIXEL_FORMAT_MONO_8){

                        //BOOST_LOG_SEV(logger, normal) << "Creating Mat 8 bits ...";
                        image = Mat(mHeight, mWidth, CV_8UC1, buffer_data);
                        imgDepth = MONO8;
                        saturateVal = 255;

                    }else if(pixFormat == ARV_PIXEL_FORMAT_MONO_12){

                        //BOOST_LOG_SEV(logger, normal) << "Creating Mat 16 bits ...";
                        image = Mat(mHeight, mWidth, CV_16UC1, buffer_data);
                        imgDepth = MONO12;
                        saturateVal = 4095;

                        //double t3 = (double)getTickCount();

                        if(shiftBitsImage){

                            //BOOST_LOG_SEV(logger, normal) << "Shifting bits ...";


                                unsigned short * p;

                                for(int i = 0; i < image.rows; i++){
                                    p = image.ptr<unsigned short>(i);
                                    for(int j = 0; j < image.cols; j++)
                                        p[j] = p[j] >> 4;
                                }

                            //BOOST_LOG_SEV(logger, normal) << "Bits shifted.";

                        }

                        //t3 = (((double)getTickCount() - t3)/getTickFrequency())*1000;
                        //cout << "> Time shift : " << t3 << endl;
                    }

                    //BOOST_LOG_SEV(logger, normal) << "Creating frame object ...";
                    newFrame = Frame(image, gain, exp, acquisitionDate);
                    //BOOST_LOG_SEV(logger, normal) << "Setting date of frame ...";
                    //newFrame.setAcqDateMicro(acqDateInMicrosec);
                    //BOOST_LOG_SEV(logger, normal) << "Setting fps of frame ...";
                    newFrame.mFps = fps;
                    newFrame.mFormat = imgDepth;
                    //BOOST_LOG_SEV(logger, normal) << "Setting saturated value of frame ...";
                    newFrame.mSaturatedValue = saturateVal;
                    newFrame.mFrameNumber = frameCounter;
                    frameCounter++;

                    //BOOST_LOG_SEV(logger, normal) << "Re-pushing arv buffer in stream ...";
                    arv_stream_push_buffer(stream, arv_buffer);

                    return true;

                }else{

                    switch(arv_buffer_get_status(arv_buffer)){
Example #19
    bool CameraGigeAravis::grabInitialization(){

        frameCounter = 0;

        payload = arv_camera_get_payload (camera);
        BOOST_LOG_SEV(logger, notification) << "Camera payload : " << payload;

        pixFormat = arv_camera_get_pixel_format(camera);

        arv_camera_get_exposure_time_bounds (camera, &exposureMin, &exposureMax);
        BOOST_LOG_SEV(logger, notification) << "Camera exposure bound min : " << exposureMin;
        BOOST_LOG_SEV(logger, notification) << "Camera exposure bound max : " << exposureMax;

        arv_camera_get_gain_bounds (camera, &gainMin, &gainMax);
        BOOST_LOG_SEV(logger, notification) << "Camera gain bound min : " << gainMin;
        BOOST_LOG_SEV(logger, notification) << "Camera gain bound max : " << gainMax;

        arv_camera_set_frame_rate(camera, 30);

        fps = arv_camera_get_frame_rate(camera);
        BOOST_LOG_SEV(logger, notification) << "Camera frame rate : " << fps;

        capsString = arv_pixel_format_to_gst_caps_string(pixFormat);
        BOOST_LOG_SEV(logger, notification) << "Camera format : " << capsString;

        gain = arv_camera_get_gain(camera);
        BOOST_LOG_SEV(logger, notification) << "Camera gain : " << gain;

        exp = arv_camera_get_exposure_time(camera);
        BOOST_LOG_SEV(logger, notification) << "Camera exposure : " << exp;

        cout << endl;

        cout << "DEVICE SELECTED : " << arv_camera_get_device_id(camera)    << endl;
        cout << "DEVICE NAME     : " << arv_camera_get_model_name(camera)   << endl;
        cout << "DEVICE VENDOR   : " << arv_camera_get_vendor_name(camera)  << endl;
        cout << "PAYLOAD         : " << payload                             << endl;
        cout << "Width           : " << mWidth                               << endl
             << "Height          : " << mHeight                              << endl;
        cout << "Exp Range       : [" << exposureMin    << " - " << exposureMax   << "]"  << endl;
        cout << "Exp             : " << exp                                 << endl;
        cout << "Gain Range      : [" << gainMin        << " - " << gainMax       << "]"  << endl;
        cout << "Gain            : " << gain                                << endl;
        cout << "Fps             : " << fps                                 << endl;
        cout << "Type            : " << capsString                         << endl;

        cout << endl;

        // Create a new stream object. Open stream on Camera.
        stream = arv_camera_create_stream(camera, NULL, NULL);

        if(stream == NULL){

            BOOST_LOG_SEV(logger, critical) << "Fail to create stream with arv_camera_create_stream()";
            return false;

        }

        if (ARV_IS_GV_STREAM(stream)){

            bool            arv_option_auto_socket_buffer   = true;
            bool            arv_option_no_packet_resend     = true;
            unsigned int    arv_option_packet_timeout       = 20;
            unsigned int    arv_option_frame_retention      = 100;

            if(arv_option_auto_socket_buffer){

                g_object_set(stream,
                            // ARV_GV_STREAM_SOCKET_BUFFER_FIXED : socket buffer is set to a given fixed value.
                            // ARV_GV_STREAM_SOCKET_BUFFER_AUTO: socket buffer is set with respect to the payload size.
                            "socket-buffer", ARV_GV_STREAM_SOCKET_BUFFER_AUTO,
                            // Socket buffer size, in bytes.
                            // Allowed values: >= G_MAXULONG
                            // Default value: 0
                            "socket-buffer-size", 0, NULL);

            }

            if(arv_option_no_packet_resend){

                // # packet-resend : Enables or disables the packet resend mechanism

                // If packet resend is disabled and a packet has been lost during transmission,
                // the grab result for the returned buffer holding the image will indicate that
                // the grab failed and the image will be incomplete.
                //
                // If packet resend is enabled and a packet has been lost during transmission,
                // a request is sent to the camera. If the camera still has the packet in its
                // buffer, it will resend the packet. If there are several lost packets in a
                // row, the resend requests will be combined.

                g_object_set(stream,
                            // ARV_GV_STREAM_PACKET_RESEND_NEVER: never request a packet resend
                            // ARV_GV_STREAM_PACKET_RESEND_ALWAYS: request a packet resend if a packet was missing
                            // Default value: ARV_GV_STREAM_PACKET_RESEND_ALWAYS
                            "packet-resend", ARV_GV_STREAM_PACKET_RESEND_NEVER, NULL);

            }

            g_object_set(stream,
                        // # packet-timeout

                        // The Packet Timeout parameter defines how long we will wait for the
                        // next expected packet before a resend request is initiated.

                        // Packet timeout, in µs.
                        // Allowed values: [1000,10000000]
                        // Default value: 40000
                        "packet-timeout",/* (unsigned) arv_option_packet_timeout * 1000*/(unsigned)40000,
                        // # frame-retention

                        // The Frame Retention parameter sets the timeout for the frame
                        // retention timer. Whenever detection of the leader is made for a frame,
                        // the frame retention timer starts. The timer resets after each packet in the
                        // frame is received and will timeout after the last packet is received. If the
                        // timer times out at any time before the last packet is received, the buffer for
                        // the frame will be released and will be indicated as an unsuccessful grab.

                        // Packet retention, in µs.
                        // Allowed values: [1000,10000000]
                        // Default value: 200000
                        "frame-retention", /*(unsigned) arv_option_frame_retention * 1000*/(unsigned) 200000,NULL);

        }else
            return false;

        // Push 50 buffers into the stream input buffer queue.
        for (int i = 0; i < 50; i++)
            arv_stream_push_buffer(stream, arv_buffer_new(payload, NULL));

        return true;

    }
Example #20
int
main (int argc, char **argv)
{
	ApplicationData data;
	ArvCamera *camera;
	ArvStream *stream;
	ArvBuffer *buffer;
	int i;

	data.buffer_count = 0;

	/* Mandatory glib type system initialization */
	arv_g_type_init ();

	/* Instantiation of the first available camera */
	camera = arv_camera_new (NULL);

	if (camera != NULL) {
		void (*old_sigint_handler)(int);
		gint payload;
		guint software_trigger_source = 0;

		/* Set region of interest to a 200x200 pixel area */
		arv_camera_set_region (camera, 0, 0, 200, 200);
		/* Set frame rate to 10 Hz */
		arv_camera_set_frame_rate (camera, 10.0);
		/* retrieve image payload (number of bytes per image) */
		payload = arv_camera_get_payload (camera);

		/* Create a new stream object */
		stream = arv_camera_create_stream (camera, NULL, NULL);
		if (stream != NULL) {
			/* Push 50 buffers into the stream input buffer queue */
			for (i = 0; i < 50; i++)
				arv_stream_push_buffer (stream, arv_buffer_new (payload, NULL));

			/* Start the video stream */
			arv_camera_start_acquisition (camera);

			/* Connect the new-buffer signal */
			g_signal_connect (stream, "new-buffer", G_CALLBACK (new_buffer_cb), &data);
			/* And enable emission of this signal (it's disabled by default for performance reasons) */
			arv_stream_set_emit_signals (stream, TRUE);

			/* Connect the control-lost signal */
			g_signal_connect (arv_camera_get_device (camera), "control-lost",
					  G_CALLBACK (control_lost_cb), NULL);

			/* Install the callback for frame rate display */
			g_timeout_add_seconds (1, periodic_task_cb, &data);

			/* Create a new glib main loop */
			data.main_loop = g_main_loop_new (NULL, FALSE);

			old_sigint_handler = signal (SIGINT, set_cancel);

			/* Run the main loop */
			g_main_loop_run (data.main_loop);

			signal (SIGINT, old_sigint_handler);

			g_main_loop_unref (data.main_loop);

			/* Stop the video stream */
			arv_camera_stop_acquisition (camera);

			g_object_unref (stream);
		} else
			printf ("Can't create stream thread (check if the device is not already used)\n");

		g_object_unref (camera);
	} else
		printf ("No camera found\n");

	return 0;
}