static void
gst_v4l2sink_init (GstV4l2Sink * v4l2sink, GstV4l2SinkClass * klass)
{
  v4l2sink->v4l2object = gst_v4l2_object_new (GST_ELEMENT (v4l2sink),
      V4L2_BUF_TYPE_VIDEO_OUTPUT, DEFAULT_PROP_DEVICE,
      gst_v4l2_get_input, gst_v4l2_set_input, NULL);

  /* The default device used for v4l2src/capture is no good for a video
   * output device, so set a saner default (which can be overridden by
   * whoever creates the v4l2sink after the constructor returns) */
  g_object_set (v4l2sink, "device", "/dev/video1", NULL);

  /* number of buffers requested */
  v4l2sink->num_buffers = PROP_DEF_QUEUE_SIZE;

  v4l2sink->probed_caps = NULL;
  v4l2sink->current_caps = NULL;

  v4l2sink->overlay_fields_set = 0;
  v4l2sink->phys_base = 0x10000000;
  v4l2sink->state = 0;
}
static void
gst_v4l2_transform_subinstance_init (GTypeInstance * instance, gpointer g_class)
{
  GstV4l2TransformClass *klass = GST_V4L2_TRANSFORM_CLASS (g_class);
  GstV4l2Transform *self = GST_V4L2_TRANSFORM (instance);

  self->v4l2output = gst_v4l2_object_new (GST_ELEMENT (self),
      V4L2_BUF_TYPE_VIDEO_OUTPUT, klass->default_device,
      gst_v4l2_get_output, gst_v4l2_set_output, NULL);
  self->v4l2output->no_initial_format = TRUE;
  self->v4l2output->keep_aspect = FALSE;

  self->v4l2capture = gst_v4l2_object_new (GST_ELEMENT (self),
      V4L2_BUF_TYPE_VIDEO_CAPTURE, klass->default_device,
      gst_v4l2_get_input, gst_v4l2_set_input, NULL);
  self->v4l2capture->no_initial_format = TRUE;
  self->v4l2capture->keep_aspect = FALSE;
}
static void
gst_v4l2src_init (GstV4l2Src * v4l2src)
{
  /* fixme: give an update_fps_function */
  v4l2src->v4l2object = gst_v4l2_object_new (GST_ELEMENT (v4l2src),
      V4L2_BUF_TYPE_VIDEO_CAPTURE, DEFAULT_PROP_DEVICE,
      gst_v4l2_get_input, gst_v4l2_set_input, NULL);

  gst_base_src_set_format (GST_BASE_SRC (v4l2src), GST_FORMAT_TIME);
  gst_base_src_set_live (GST_BASE_SRC (v4l2src), TRUE);
}
static void
gst_v4l2_video_dec_subinstance_init (GTypeInstance * instance, gpointer g_class)
{
  GstV4l2VideoDecClass *klass = GST_V4L2_VIDEO_DEC_CLASS (g_class);
  GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (instance);
  GstVideoDecoder *decoder = GST_VIDEO_DECODER (instance);

  gst_video_decoder_set_packetized (decoder, TRUE);

  self->v4l2output = gst_v4l2_object_new (GST_ELEMENT (self),
      V4L2_BUF_TYPE_VIDEO_OUTPUT, klass->default_device,
      gst_v4l2_get_output, gst_v4l2_set_output, NULL);
  self->v4l2output->no_initial_format = TRUE;
  self->v4l2output->keep_aspect = FALSE;

  self->v4l2capture = gst_v4l2_object_new (GST_ELEMENT (self),
      V4L2_BUF_TYPE_VIDEO_CAPTURE, klass->default_device,
      gst_v4l2_get_input, gst_v4l2_set_input, NULL);
  self->v4l2capture->no_initial_format = TRUE;
  self->v4l2capture->keep_aspect = FALSE;
}
static void
gst_v4l2sink_init (GstV4l2Sink * v4l2sink)
{
  v4l2sink->v4l2object = gst_v4l2_object_new (GST_ELEMENT (v4l2sink),
      V4L2_BUF_TYPE_VIDEO_OUTPUT, DEFAULT_PROP_DEVICE,
      gst_v4l2_get_output, gst_v4l2_set_output, NULL);

  /* The default device used for v4l2src/capture is no good for a video
   * output device, so set a saner default (which can be overridden by
   * whoever creates the v4l2sink after the constructor returns) */
  g_object_set (v4l2sink, "device", "/dev/video1", NULL);

  v4l2sink->overlay_fields_set = 0;
  v4l2sink->crop_fields_set = 0;
}
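/* Hedged usage sketch (not part of the element sources): it illustrates the
 * comment in gst_v4l2sink_init() above, i.e. that the "/dev/video1" default
 * can be overridden by whoever creates the v4l2sink after the constructor
 * returns.  Assumes gst_init() has already been called; the helper name and
 * the device path passed by the caller are hypothetical. */
#include <gst/gst.h>

static GstElement *
make_v4l2sink_for_device (const gchar * device_path)
{
  GstElement *sink = gst_element_factory_make ("v4l2sink", NULL);

  if (sink == NULL)
    return NULL;

  /* Replaces the default device chosen in the instance init. */
  g_object_set (sink, "device", device_path, NULL);

  return sink;
}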
static void
gst_v4l2src_init (GstV4l2Src * v4l2src, GstV4l2SrcClass * klass)
{
  /* fixme: give an update_fps_function */
  v4l2src->v4l2object = gst_v4l2_object_new (GST_ELEMENT (v4l2src),
      V4L2_BUF_TYPE_VIDEO_CAPTURE, DEFAULT_PROP_DEVICE,
      gst_v4l2_get_input, gst_v4l2_set_input, NULL);

  /* number of buffers requested */
  v4l2src->num_buffers = PROP_DEF_QUEUE_SIZE;

  v4l2src->always_copy = PROP_DEF_ALWAYS_COPY;
  v4l2src->is_capturing = FALSE;

  gst_base_src_set_format (GST_BASE_SRC (v4l2src), GST_FORMAT_TIME);
  gst_base_src_set_live (GST_BASE_SRC (v4l2src), TRUE);

  v4l2src->fps_d = 0;
  v4l2src->fps_n = 0;
}
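/* Hedged usage sketch (not part of the element sources): because the init
 * functions above configure the base source as live and in GST_FORMAT_TIME,
 * v4l2src drops straight into a live pipeline.  The pipeline string and the
 * device path are only examples; assumes gst_init() has been called. */
#include <gst/gst.h>

static GstElement *
make_capture_pipeline (GError ** error)
{
  GstElement *pipeline = gst_parse_launch (
      "v4l2src device=/dev/video0 ! videoconvert ! autovideosink", error);

  if (pipeline != NULL)
    gst_element_set_state (pipeline, GST_STATE_PLAYING);

  return pipeline;
}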
static GstV4l2Device *
gst_v4l2_device_provider_probe_device (GstV4l2DeviceProvider * provider,
    const gchar * device_path, const gchar * device_name, GstStructure * props)
{
  GstV4l2Object *v4l2obj;
  GstCaps *caps;
  GstV4l2Device *device = NULL;
  struct stat st;
  GstV4l2DeviceType type = GST_V4L2_DEVICE_TYPE_INVALID;

  g_return_val_if_fail (props != NULL, NULL);

  if (stat (device_path, &st) == -1)
    return NULL;

  if (!S_ISCHR (st.st_mode))
    return NULL;

  v4l2obj = gst_v4l2_object_new ((GstElement *) provider,
      V4L2_BUF_TYPE_VIDEO_CAPTURE, device_path, NULL, NULL, NULL);

  if (!gst_v4l2_open (v4l2obj))
    goto destroy;

  gst_structure_set (props, "device.api", G_TYPE_STRING, "v4l2", NULL);
  gst_structure_set (props, "device.path", G_TYPE_STRING, device_path, NULL);

  gst_structure_set (props, "v4l2.device.driver", G_TYPE_STRING,
      v4l2obj->vcap.driver, NULL);
  gst_structure_set (props, "v4l2.device.card", G_TYPE_STRING,
      v4l2obj->vcap.card, NULL);
  gst_structure_set (props, "v4l2.device.bus_info", G_TYPE_STRING,
      v4l2obj->vcap.bus_info, NULL);
  gst_structure_set (props, "v4l2.device.version", G_TYPE_UINT,
      v4l2obj->vcap.version, NULL);
  gst_structure_set (props, "v4l2.device.capabilities", G_TYPE_UINT,
      v4l2obj->vcap.capabilities, NULL);
  gst_structure_set (props, "v4l2.device.device_caps", G_TYPE_UINT,
      v4l2obj->vcap.device_caps, NULL);

  if (v4l2obj->vcap.capabilities & V4L2_CAP_VIDEO_CAPTURE)
    type = GST_V4L2_DEVICE_TYPE_SOURCE;

  if (v4l2obj->vcap.capabilities & V4L2_CAP_VIDEO_OUTPUT) {
    /* Morph it in case our initial guess was wrong */
    v4l2obj->type = V4L2_BUF_TYPE_VIDEO_OUTPUT;

    if (type == GST_V4L2_DEVICE_TYPE_INVALID) {
      type = GST_V4L2_DEVICE_TYPE_SINK;
    } else {
      /* We ignore M2M devices that are both capture and output for now
       * The provider is not for them */
      goto close;
    }
  }

  caps = gst_v4l2_object_get_caps (v4l2obj, NULL);

  if (caps == NULL)
    goto close;

  if (gst_caps_is_empty (caps)) {
    gst_caps_unref (caps);
    goto close;
  }

  device = gst_v4l2_device_new (device_path,
      device_name ? device_name : (gchar *) v4l2obj->vcap.card,
      caps, type, props);
  gst_caps_unref (caps);

close:
  gst_v4l2_close (v4l2obj);

destroy:
  gst_v4l2_object_destroy (v4l2obj);

  if (props)
    gst_structure_free (props);

  return device;
}
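/* Hedged usage sketch (not part of the provider sources): devices built by
 * gst_v4l2_device_provider_probe_device() reach applications through the
 * generic GstDeviceMonitor API.  This lists capture devices; the
 * "Video/Source" filter class is an assumption about how devices of type
 * GST_V4L2_DEVICE_TYPE_SOURCE are classified.  Assumes gst_init() has been
 * called. */
#include <gst/gst.h>

static void
list_v4l2_capture_devices (void)
{
  GstDeviceMonitor *monitor = gst_device_monitor_new ();
  GList *devices, *l;

  gst_device_monitor_add_filter (monitor, "Video/Source", NULL);
  gst_device_monitor_start (monitor);

  devices = gst_device_monitor_get_devices (monitor);
  for (l = devices; l != NULL; l = l->next) {
    GstDevice *device = GST_DEVICE (l->data);
    gchar *name = gst_device_get_display_name (device);

    g_print ("found device: %s\n", name);
    g_free (name);
  }
  g_list_free_full (devices, (GDestroyNotify) gst_object_unref);

  gst_device_monitor_stop (monitor);
  gst_object_unref (monitor);
}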