Example #1
bool CvCaptureCAM_Aravis::open( int index )
{
    if(create(index)) {
        // fetch properties bounds
        pixelFormats = arv_camera_get_available_pixel_formats(camera, &pixelFormatsCnt);

        arv_camera_get_width_bounds(camera, &widthMin, &widthMax);
        arv_camera_get_height_bounds(camera, &heightMin, &heightMax);
        arv_camera_set_region(camera, 0, 0, widthMax, heightMax);

        if( (fpsAvailable = arv_camera_is_frame_rate_available(camera)) )
            arv_camera_get_frame_rate_bounds(camera, &fpsMin, &fpsMax);
        if( (gainAvailable = arv_camera_is_gain_available(camera)) )
            arv_camera_get_gain_bounds (camera, &gainMin, &gainMax);
        if( (exposureAvailable = arv_camera_is_exposure_time_available(camera)) )
            arv_camera_get_exposure_time_bounds (camera, &exposureMin, &exposureMax);

        // get initial values
        pixelFormat = arv_camera_get_pixel_format(camera);
        exposure = exposureAvailable ? arv_camera_get_exposure_time(camera) : 0;
        gain = gainAvailable ? arv_camera_get_gain(camera) : 0;
        fps = arv_camera_get_frame_rate(camera);

        return startCapture();
    }
    return false;
}
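A minimal usage sketch (not part of the examples above), assuming OpenCV was built with the Aravis backend enabled (WITH_ARAVIS=ON) and a reasonably recent C++ API: opening with the cv::CAP_ARAVIS preference routes into CvCaptureCAM_Aravis::open() shown above, and the cv::CAP_PROP_* queries correspond to the CV_CAP_PROP_* cases handled by getProperty() in the later examples.

#include <opencv2/videoio.hpp>

int main()
{
    // Index 0 selects the first enumerated Aravis device and ends up in CvCaptureCAM_Aravis::open(0).
    cv::VideoCapture cap(0, cv::CAP_ARAVIS);
    if (!cap.isOpened())
        return 1;

    // These map onto the property cases handled by getProperty() below.
    double exposure = cap.get(cv::CAP_PROP_EXPOSURE);  // exposure time in seconds
    double fps      = cap.get(cv::CAP_PROP_FPS);
    (void)exposure; (void)fps;
    return 0;
}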
Example #2
double CvCaptureCAM_Aravis::getProperty( int property_id ) const
{
    switch ( property_id ) {
        case CV_CAP_PROP_EXPOSURE:
            if(exposureAvailable) {
                /* exposure time in seconds, like 1/100 s */
                return arv_camera_get_exposure_time(camera) / 1e6;
            }
            break;

        case CV_CAP_PROP_FPS:
            if(fpsAvailable) {
                return arv_camera_get_frame_rate(camera);
            }
            break;

        case CV_CAP_PROP_GAIN:
            if(gainAvailable) {
                return arv_camera_get_gain(camera);
            }
            break;

        case CV_CAP_PROP_FOURCC:
            {
                ArvPixelFormat currFormat = arv_camera_get_pixel_format(camera);
                switch( currFormat ) {
                    case ARV_PIXEL_FORMAT_MONO_8:
                        return MODE_GRAY8;
                    case ARV_PIXEL_FORMAT_MONO_12:
                        return MODE_GRAY12;
                }
            }
    }
    return -1.0;
}
Example #3
static unicap_status_t aravis_get_format( aravis_handle_t handle, unicap_format_t *format )
{
	ArvPixelFormat pixel_fmt = arv_camera_get_pixel_format (handle->camera);

	if (!pixel_fmt)
		return STATUS_FAILURE;
	
	unicap_void_format (format);

	strcpy (format->identifier, aravis_tools_get_pixel_format_string (pixel_fmt));
	format->fourcc = aravis_tools_get_fourcc (pixel_fmt);
	format->bpp = aravis_tools_get_bpp (pixel_fmt);
	arv_camera_get_region (handle->camera, 
			       &format->size.x, 
			       &format->size.y, 
			       &format->size.width, 
			       &format->size.height);
	arv_camera_get_width_bounds (handle->camera, 
				     &format->min_size.width, 
				     &format->max_size.width);
	arv_camera_get_height_bounds (handle->camera, 
				      &format->min_size.height,
				      &format->max_size.height);	
	format->buffer_size = format->bpp * format->size.width * format->size.height / 8;
	format->buffer_type = UNICAP_BUFFER_TYPE_SYSTEM;

	return STATUS_SUCCESS;
}
Example #4
double CvCaptureCAM_Aravis::getProperty( int property_id ) const
{
    switch(property_id) {
        case CV_CAP_PROP_POS_MSEC:
            return (double)frameID/fps;

        case CV_CAP_PROP_FRAME_WIDTH:
            return width;

        case CV_CAP_PROP_FRAME_HEIGHT:
            return height;

        case CV_CAP_PROP_AUTO_EXPOSURE:
            return (controlExposure ? 1 : 0);

        case CV_CAP_PROP_EXPOSURE:
            if(exposureAvailable) {
                /* exposure time in seconds, like 1/100 s */
                return arv_camera_get_exposure_time(camera) / 1e6;
            }
            break;

        case CV_CAP_PROP_FPS:
            if(fpsAvailable) {
                return arv_camera_get_frame_rate(camera);
            }
            break;

        case CV_CAP_PROP_GAIN:
            if(gainAvailable) {
                return arv_camera_get_gain(camera);
            }
            break;

        case CV_CAP_PROP_FOURCC:
            {
                ArvPixelFormat currFormat = arv_camera_get_pixel_format(camera);
                switch( currFormat ) {
                    case ARV_PIXEL_FORMAT_MONO_8:
                        return MODE_Y800;
                    case ARV_PIXEL_FORMAT_MONO_12:
                        return MODE_Y12;
                }
            }
            break;

        case CV_CAP_PROP_BUFFERSIZE:
            if(stream) {
                int in, out;
                arv_stream_get_n_buffers(stream, &in, &out);
                // return number of available buffers in Aravis output queue
                return out;
            }
            break;
    }
    return -1.0;
}
Example #5
    bool CameraGigeAravis::grabInitialization(){

        frameCounter = 0;

        payload = arv_camera_get_payload (camera);
        BOOST_LOG_SEV(logger, notification) << "Camera payload : " << payload;

        pixFormat = arv_camera_get_pixel_format(camera);

        arv_camera_get_exposure_time_bounds (camera, &exposureMin, &exposureMax);
        BOOST_LOG_SEV(logger, notification) << "Camera exposure bound min : " << exposureMin;
        BOOST_LOG_SEV(logger, notification) << "Camera exposure bound max : " << exposureMax;

        arv_camera_get_gain_bounds (camera, &gainMin, &gainMax);
        BOOST_LOG_SEV(logger, notification) << "Camera gain bound min : " << gainMin;
        BOOST_LOG_SEV(logger, notification) << "Camera gain bound max : " << gainMax;

        arv_camera_set_frame_rate(camera, 30);

        fps = arv_camera_get_frame_rate(camera);
        BOOST_LOG_SEV(logger, notification) << "Camera frame rate : " << fps;

        capsString = arv_pixel_format_to_gst_caps_string(pixFormat);
        BOOST_LOG_SEV(logger, notification) << "Camera format : " << capsString;

        gain = arv_camera_get_gain(camera);
        BOOST_LOG_SEV(logger, notification) << "Camera gain : " << gain;

        exp = arv_camera_get_exposure_time(camera);
        BOOST_LOG_SEV(logger, notification) << "Camera exposure : " << exp;

        cout << endl;

        cout << "DEVICE SELECTED : " << arv_camera_get_device_id(camera)    << endl;
        cout << "DEVICE NAME     : " << arv_camera_get_model_name(camera)   << endl;
        cout << "DEVICE VENDOR   : " << arv_camera_get_vendor_name(camera)  << endl;
        cout << "PAYLOAD         : " << payload                             << endl;
        cout << "Width           : " << mWidth                               << endl
             << "Height          : " << mHeight                              << endl;
        cout << "Exp Range       : [" << exposureMin    << " - " << exposureMax   << "]"  << endl;
        cout << "Exp             : " << exp                                 << endl;
        cout << "Gain Range      : [" << gainMin        << " - " << gainMax       << "]"  << endl;
        cout << "Gain            : " << gain                                << endl;
        cout << "Fps             : " << fps                                 << endl;
        cout << "Type            : " << capsString                         << endl;

        cout << endl;

        // Create a new stream object and open a stream on the camera.
        stream = arv_camera_create_stream(camera, NULL, NULL);

        if(stream == NULL){

            BOOST_LOG_SEV(logger, critical) << "Failed to create stream with arv_camera_create_stream()";
            return false;

        }

        if (ARV_IS_GV_STREAM(stream)){

            // Stream tuning options; the two timeout values (in ms) are kept for reference only,
            // since hard-coded defaults are passed to g_object_set() further down.
            bool            arv_option_auto_socket_buffer   = true;
            bool            arv_option_no_packet_resend     = true;
            unsigned int    arv_option_packet_timeout       = 20;
            unsigned int    arv_option_frame_retention      = 100;

            if(arv_option_auto_socket_buffer){

                g_object_set(stream,
                            // ARV_GV_STREAM_SOCKET_BUFFER_FIXED : socket buffer is set to a given fixed value.
                            // ARV_GV_STREAM_SOCKET_BUFFER_AUTO: socket buffer is set with respect to the payload size.
                            "socket-buffer", ARV_GV_STREAM_SOCKET_BUFFER_AUTO,
                            // Socket buffer size, in bytes.
                            // Default value: 0
                            "socket-buffer-size", 0, NULL);

            }

            if(arv_option_no_packet_resend){

                // # packet-resend : Enables or disables the packet resend mechanism

                // If packet resend is disabled and a packet has been lost during transmission,
                // the grab result for the returned buffer holding the image will indicate that
                // the grab failed and the image will be incomplete.
                //
                // If packet resend is enabled and a packet has been lost during transmission,
                // a request is sent to the camera. If the camera still has the packet in its
                // buffer, it will resend the packet. If there are several lost packets in a
                // row, the resend requests will be combined.

                g_object_set(stream,
                            // ARV_GV_STREAM_PACKET_RESEND_NEVER: never request a packet resend
                            // ARV_GV_STREAM_PACKET_RESEND_ALWAYS: request a packet resend if a packet was missing
                            // Default value: ARV_GV_STREAM_PACKET_RESEND_ALWAYS
                            "packet-resend", ARV_GV_STREAM_PACKET_RESEND_NEVER, NULL);

            }

            g_object_set(stream,
                        // # packet-timeout

                        // The packet-timeout parameter defines how long the receiving thread waits
                        // for the next expected packet before it initiates a resend request.

                        // Packet timeout, in µs.
                        // Allowed values: [1000,10000000]
                        // Default value: 40000
                        "packet-timeout",/* (unsigned) arv_option_packet_timeout * 1000*/(unsigned)40000,
                        // # frame-retention

                        // The frame-retention parameter sets the timeout for the
                        // frame retention timer. Whenever detection of the leader is made for a frame,
                        // the frame retention timer starts. The timer resets after each packet in the
                        // frame is received and will timeout after the last packet is received. If the
                        // timer times out at any time before the last packet is received, the buffer for
                        // the frame will be released and will be indicated as an unsuccessful grab.

                        // Frame retention, in µs.
                        // Allowed values: [1000,10000000]
                        // Default value: 200000
                        "frame-retention", /*(unsigned) arv_option_frame_retention * 1000*/(unsigned) 200000,NULL);

        }else
            return false;

        // Push 50 buffers into the stream input buffer queue.
        for (int i = 0; i < 50; i++)
            arv_stream_push_buffer(stream, arv_buffer_new(payload, NULL));

        return true;

    }
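For context, a minimal sketch (not taken from the project above) of the acquisition loop that would typically follow grabInitialization(), written against the same pre-0.8 Aravis API (no GError arguments) used throughout these examples; the camera and stream parameters stand in for the class members of the same names.

#include <arv.h>

static void acquisition_sketch(ArvCamera *camera, ArvStream *stream)
{
    arv_camera_start_acquisition(camera);

    for (int i = 0; i < 10; i++) {
        // Wait up to 2 s for a filled buffer from the stream output queue.
        ArvBuffer *buffer = arv_stream_timeout_pop_buffer(stream, 2000000);
        if (buffer == NULL)
            continue;

        if (arv_buffer_get_status(buffer) == ARV_BUFFER_STATUS_SUCCESS) {
            size_t size;
            const void *data = arv_buffer_get_data(buffer, &size);
            // ... copy or convert the raw frame (size bytes at data) here ...
            (void)data;
        }

        // Return the buffer to the input queue so it can be filled again.
        arv_stream_push_buffer(stream, buffer);
    }

    arv_camera_stop_acquisition(camera);
}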