Beispiel #1
0
/* GstBaseSrc::start vfunc: open and configure the V4L2 capture device.
 * Returns FALSE (and leaves no fd behind) on any setup failure. */
static gboolean gst_imx_v4l2src_start(GstBaseSrc *src)
{
	GstImxV4l2VideoSrc *v4l2src = GST_IMX_V4L2SRC(src);
	struct v4l2_format fmt;
	int fd_v4l;

	GST_LOG_OBJECT(v4l2src, "start");

	fd_v4l = gst_imx_v4l2src_capture_setup(v4l2src);
	if (fd_v4l < 0) {
		GST_ERROR_OBJECT(v4l2src, "capture_setup failed");
		return FALSE;
	}

	/* Wrap the raw fd in a refcounted object so it can be shared with
	 * the buffer pool */
	v4l2src->fd_obj_v4l = gst_fd_object_new(fd_v4l);

	/* Query the format the driver actually settled on */
	fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	if (ioctl(GST_IMX_FD_OBJECT_GET_FD(v4l2src->fd_obj_v4l), VIDIOC_G_FMT, &fmt) < 0) {
		GST_ERROR_OBJECT(v4l2src, "VIDIOC_G_FMT failed");
		/* drop the fd object so the device fd is not leaked on the
		 * error path (the original code returned without releasing it) */
		gst_imx_fd_object_unref(v4l2src->fd_obj_v4l);
		v4l2src->fd_obj_v4l = NULL;
		return FALSE;
	}

	GST_DEBUG_OBJECT(v4l2src, "width = %d", fmt.fmt.pix.width);
	GST_DEBUG_OBJECT(v4l2src, "height = %d", fmt.fmt.pix.height);
	GST_DEBUG_OBJECT(v4l2src, "sizeimage = %d", fmt.fmt.pix.sizeimage);
	GST_DEBUG_OBJECT(v4l2src, "pixelformat = %d", fmt.fmt.pix.pixelformat);

	/* Frame duration in nanoseconds, derived from the configured framerate */
	v4l2src->time_per_frame = gst_util_uint64_scale_int(GST_SECOND,
			v4l2src->fps_d, v4l2src->fps_n);
	v4l2src->count = 0;

	return TRUE;
}
Beispiel #2
0
/* GstBaseSrc::decide_allocation vfunc: choose the buffer pool and
 * min/max buffer counts used for capture, based on the downstream
 * allocation query and the driver's negotiated image size. */
static gboolean gst_imx_v4l2src_decide_allocation(GstBaseSrc *bsrc,
		GstQuery *query)
{
	GstImxV4l2Src *v4l2src = GST_IMX_V4L2SRC(bsrc);
	struct v4l2_format fmt;
	GstBufferPool *pool;
	guint size, min, max;
	gboolean update;
	GstStructure *config;
	GstCaps *caps;

	gst_query_parse_allocation(query, &caps, NULL);

	/* Determine min and max */
	if (gst_query_get_n_allocation_pools(query) > 0)
	{
		/* Downstream proposed a pool: reuse its min/max and later
		 * overwrite entry 0 instead of appending a new one */
		gst_query_parse_nth_allocation_pool(query, 0, NULL, NULL,
				&min, &max);
		update = TRUE;
	}
	else
	{
		min = max = 0;
		update = FALSE;
	}

	if (min != 0)
		/* Need an extra buffer to capture while other buffers
		 * are downstream */
		min += 1;
	else
		/* no downstream requirement: fall back to the configured
		 * queue size property */
		min = v4l2src->queue_size;

	/* Ask the driver for the negotiated image size so each pool buffer
	 * is large enough for one captured frame */
	fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	if (ioctl(GST_IMX_FD_OBJECT_GET_FD(v4l2src->fd_obj_v4l), VIDIOC_G_FMT, &fmt) < 0) {
		GST_ERROR_OBJECT(v4l2src, "VIDIOC_G_FMT failed");
		return FALSE;
	}

	size = fmt.fmt.pix.sizeimage;

	/* no repooling; leads to stream off situation due to pool start/stop */
	pool = gst_base_src_get_buffer_pool(bsrc);
	if (!pool) {
		/* First negotiation: create a new pool bound to the device fd
		 * and configure it with the sizes determined above */
		pool = gst_imx_v4l2_buffer_pool_new(v4l2src->fd_obj_v4l);
		config = gst_buffer_pool_get_config(pool);
		gst_buffer_pool_config_set_params(config, caps, size, min, max);
		gst_buffer_pool_config_add_option(config, GST_BUFFER_POOL_OPTION_VIDEO_META);
		gst_buffer_pool_set_config(pool, config);
	}

	if (update)
		gst_query_set_nth_allocation_pool(query, 0, pool, size, min, max);
	else
		gst_query_add_allocation_pool(query, pool, size, min, max);

	/* the query holds its own ref on the pool now */
	gst_object_unref(pool);

	return TRUE;
}
Beispiel #3
0
/* GObject property getter: report the element's configuration values. */
static void gst_imx_v4l2src_get_property(GObject *object, guint prop_id,
		GValue *value, GParamSpec *pspec)
{
	GstImxV4l2Src *self = GST_IMX_V4L2SRC(object);

	switch (prop_id)
	{
		case IMX_V4L2SRC_DEVICE:
			g_value_set_string(value, self->devicename);
			break;

		case IMX_V4L2SRC_CAPTURE_MODE:
			g_value_set_int(value, self->capture_mode);
			break;

		case IMX_V4L2SRC_FRAMERATE_NUM:
			g_value_set_int(value, self->fps_n);
			break;

		case IMX_V4L2SRC_INPUT:
			g_value_set_int(value, self->input);
			break;

		case IMX_V4L2SRC_QUEUE_SIZE:
			g_value_set_int(value, self->queue_size);
			break;

		default:
			G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
			break;
	}
}
Beispiel #4
0
/* GstPhotography::set_focus_mode vfunc.
 * Maps the requested mode onto the supported set, then applies it to the
 * hardware if the element is already running. Returns FALSE for modes
 * that cannot be mapped. */
static gboolean gst_imx_v4l2src_set_focus_mode(GstPhotography *photo,
		GstPhotographyFocusMode focus_mode)
{
	GstImxV4l2VideoSrc *v4l2src = GST_IMX_V4L2SRC(photo);
	gboolean supported;

	GST_LOG_OBJECT(v4l2src, "setting focus mode to %d", focus_mode);

	switch (focus_mode) {
	case GST_PHOTOGRAPHY_FOCUS_MODE_CONTINUOUS_NORMAL:
	case GST_PHOTOGRAPHY_FOCUS_MODE_CONTINUOUS_EXTENDED:
		/* both continuous variants collapse to "continuous normal" */
		focus_mode = GST_PHOTOGRAPHY_FOCUS_MODE_CONTINUOUS_NORMAL;
		supported = TRUE;
		break;
	case GST_PHOTOGRAPHY_FOCUS_MODE_AUTO:
	case GST_PHOTOGRAPHY_FOCUS_MODE_MACRO:
	case GST_PHOTOGRAPHY_FOCUS_MODE_PORTRAIT:
	case GST_PHOTOGRAPHY_FOCUS_MODE_INFINITY:
		supported = TRUE;
		break;
	default:
		supported = FALSE;
		break;
	}

	if (!supported) {
		GST_WARNING_OBJECT(v4l2src, "focus mode %d is not supported", focus_mode);
		return FALSE;
	}

	g_mutex_lock(&v4l2src->af_mutex);

	if (v4l2src->focus_mode != focus_mode) {
		v4l2src->focus_mode = focus_mode;

		/* only touch the hardware while the element is running */
		if (GST_STATE(v4l2src) == GST_STATE_PAUSED || GST_STATE(v4l2src) == GST_STATE_PLAYING)
			gst_imx_v4l2src_apply_focus_settings(v4l2src, TRUE);
	}

	g_mutex_unlock(&v4l2src->af_mutex);

	return TRUE;
}
Beispiel #5
0
/* GstPhotography autofocus control.
 * In continuous-AF mode, "autofocus on" is implemented by *locking* the
 * focus (freezing the current continuous result); otherwise a one-shot
 * AF cycle is started or stopped via the dedicated V4L2 controls. */
void gst_imx_v4l2src_set_autofocus(GstPhotography *photo, gboolean on)
{
	GstImxV4l2VideoSrc *v4l2src = GST_IMX_V4L2SRC(photo);
	int locks;

	g_mutex_lock(&v4l2src->af_mutex);

	/* Cancel any pending AF status poll before changing AF state */
	if (v4l2src->af_clock_id) {
		gst_clock_id_unschedule(v4l2src->af_clock_id);
		gst_clock_id_unref(v4l2src->af_clock_id);
		v4l2src->af_clock_id = NULL;
	}

	if (v4l2src->focus_mode == GST_PHOTOGRAPHY_FOCUS_MODE_CONTINUOUS_NORMAL) {

		/* Continuous AF: toggle the focus bit of the 3A lock bitmask,
		 * preserving the other lock bits read from the driver */
		if (v4l2_g_ctrl(v4l2src, V4L2_CID_3A_LOCK, &locks) == 0) {
			if (on && !(locks & V4L2_LOCK_FOCUS))
				v4l2_s_ctrl(v4l2src, V4L2_CID_3A_LOCK, locks | V4L2_LOCK_FOCUS);
			else if (!on && (locks & V4L2_LOCK_FOCUS))
				v4l2_s_ctrl(v4l2src, V4L2_CID_3A_LOCK, locks & ~V4L2_LOCK_FOCUS);
		}

	} else {

		/* One-shot AF: start a cycle and begin polling its status,
		 * or stop the running cycle */
		if (on) {
			if (v4l2_s_ctrl(v4l2src, V4L2_CID_AUTO_FOCUS_START, 0) == 0)
				gst_imx_v4l2src_af_check_status(v4l2src);
		} else
			v4l2_s_ctrl(v4l2src, V4L2_CID_AUTO_FOCUS_STOP, 0);
	}

	g_mutex_unlock(&v4l2src->af_mutex);
}
Beispiel #6
0
/* GstBaseSrc::set_caps vfunc.
 * The capture format was already fixed during device setup, so the caps
 * are simply logged and accepted. */
static gboolean gst_imx_v4l2src_set_caps(GstBaseSrc *src, GstCaps *caps)
{
	GstImxV4l2VideoSrc *self = GST_IMX_V4L2SRC(src);

	GST_INFO_OBJECT(self, "set caps %" GST_PTR_FORMAT, (gpointer)caps);

	return TRUE;
}
Beispiel #7
0
/* GObject finalize: release the device name string and the AF mutex,
 * then chain up to the parent class. */
static void gst_imx_v4l2src_finalize(GObject *object)
{
	GstImxV4l2VideoSrc *self = GST_IMX_V4L2SRC(object);

	g_mutex_clear(&self->af_mutex);
	g_free(self->devicename);

	G_OBJECT_CLASS(gst_imx_v4l2src_parent_class)->finalize(object);
}
Beispiel #8
0
/* GstURIHandler::get_uri vfunc.
 * Returns a newly allocated "imxv4l2://<device>" URI; caller frees. */
static gchar * gst_imx_v4l2src_uri_get_uri(GstURIHandler * handler)
{
	GstImxV4l2VideoSrc *self = GST_IMX_V4L2SRC(handler);
	const gchar *name = self->devicename;

	return (name != NULL)
			? g_strdup_printf("imxv4l2://%s", name)
			: g_strdup("imxv4l2://");
}
Beispiel #9
0
/* GstPhotography::prepare_for_capture vfunc.
 * No hardware reconfiguration is needed; immediately signal readiness by
 * invoking the supplied callback with the requested capture caps. */
static gboolean gst_imx_v4lsrc_prepare_for_capture(GstPhotography *photo,
		GstPhotographyCapturePrepared func, GstCaps *capture_caps, gpointer user_data)
{
	GstImxV4l2VideoSrc *v4l2src = GST_IMX_V4L2SRC(photo);

	/* cast to gpointer for GST_PTR_FORMAT, consistent with the other
	 * logging calls in this file */
	GST_LOG_OBJECT(v4l2src, "capture_caps: %" GST_PTR_FORMAT, (gpointer)capture_caps);

	func(user_data, capture_caps);
	return TRUE;
}
Beispiel #10
0
/* GstBaseSrc::stop vfunc: drop the reference on the shared fd object,
 * releasing the capture device when the last user is gone. */
static gboolean gst_imx_v4l2src_stop(GstBaseSrc *src)
{
	GstImxV4l2VideoSrc *self = GST_IMX_V4L2SRC(src);

	GST_LOG_OBJECT(self, "stop");

	gst_imx_fd_object_unref(self->fd_obj_v4l);
	return TRUE;
}
Beispiel #11
0
/* GstPhotography::get_focus_mode vfunc.
 * Reads the stored focus mode under the AF mutex; always succeeds. */
static gboolean gst_imx_v4l2src_get_focus_mode(GstPhotography *photo,
		GstPhotographyFocusMode *focus_mode)
{
	GstImxV4l2VideoSrc *self = GST_IMX_V4L2SRC(photo);

	g_mutex_lock(&self->af_mutex);
	{
		*focus_mode = self->focus_mode;
	}
	g_mutex_unlock(&self->af_mutex);

	return TRUE;
}
Beispiel #12
0
/* GstURIHandler::set_uri vfunc.
 * Accepts "imxv4l2://" (default device /dev/video0) or
 * "imxv4l2://<device-path>", and stores the device via the "device"
 * property. Rejects URIs with any other scheme. */
static gboolean gst_imx_v4l2src_uri_set_uri(GstURIHandler * handler,
		const gchar * uri, GError ** error)
{
	GstImxV4l2VideoSrc *v4l2src = GST_IMX_V4L2SRC(handler);
	const gchar *device = "/dev/video0";

	/* Validate the scheme before skipping it: the original code did
	 * `uri + 10` on any string != "imxv4l2://", which reads past the
	 * end of shorter or foreign-scheme URIs (undefined behavior). */
	if (strncmp(uri, "imxv4l2://", 10) != 0) {
		g_set_error(error, GST_URI_ERROR, GST_URI_ERROR_BAD_URI,
				"invalid imxv4l2 URI '%s'", uri);
		return FALSE;
	}

	if (strcmp(uri, "imxv4l2://") != 0)
		device = uri + 10; /* skip the "imxv4l2://" prefix */

	g_object_set(v4l2src, "device", device, NULL);

	return TRUE;
}
Beispiel #13
0
/* GstBaseSrc::stop vfunc.
 * Disables the focus machinery (under the AF mutex) before dropping the
 * reference on the shared device fd object. */
static gboolean gst_imx_v4l2src_stop(GstBaseSrc *src)
{
	GstImxV4l2VideoSrc *v4l2src = GST_IMX_V4L2SRC(src);

	GST_LOG_OBJECT(v4l2src, "stop");

	/* stop AF first, while the device fd is still valid */
	g_mutex_lock(&v4l2src->af_mutex);
	gst_imx_v4l2src_apply_focus_settings(v4l2src, FALSE);
	g_mutex_unlock(&v4l2src->af_mutex);

	gst_imx_fd_object_unref(v4l2src->fd_obj_v4l);

	return TRUE;
}
Beispiel #14
0
/* GObject property setter: store element configuration.
 * Values take effect the next time the device is (re)started. */
static void gst_imx_v4l2src_set_property(GObject *object, guint prop_id,
		const GValue *value, GParamSpec *pspec)
{
	GstImxV4l2VideoSrc *v4l2src = GST_IMX_V4L2SRC(object);

	switch (prop_id)
	{
		case IMX_V4L2SRC_CAPTURE_MODE:
			v4l2src->capture_mode = g_value_get_int(value);
			break;

		case IMX_V4L2SRC_FRAMERATE_NUM:
			v4l2src->fps_n = g_value_get_int(value);
			break;

		case IMX_V4L2SRC_INPUT:
			v4l2src->input = g_value_get_int(value);
			break;

		case IMX_V4L2SRC_DEVICE:
			/* g_free(NULL) is a no-op, so no guard is needed before
			 * replacing the previous device name */
			g_free(v4l2src->devicename);
			v4l2src->devicename = g_strdup(g_value_get_string(value));
			break;

		case IMX_V4L2SRC_QUEUE_SIZE:
			v4l2src->queue_size = g_value_get_int(value);
			break;

		case IMX_V4L2SRC_CROP_META_X:
			v4l2src->metaCropX = g_value_get_int(value);
			break;

		case IMX_V4L2SRC_CROP_META_Y:
			v4l2src->metaCropY = g_value_get_int(value);
			break;

		case IMX_V4L2SRC_CROP_META_WIDTH:
			v4l2src->metaCropWidth = g_value_get_int(value);
			break;

		case IMX_V4L2SRC_CROP_META_HEIGHT:
			v4l2src->metaCropHeight = g_value_get_int(value);
			break;

		default:
			G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
			break;
	}
}
Beispiel #15
0
/* GstBaseSrc::negotiate vfunc.
 * The capture setup already fixed the format, so the caps describing the
 * current setup are pushed downstream as-is. */
static gboolean gst_imx_v4l2src_negotiate(GstBaseSrc *src)
{
	GstImxV4l2VideoSrc *v4l2src = GST_IMX_V4L2SRC(src);
	GstCaps *caps;
	gboolean ret;

	/* not much to negotiate;
	 * we already performed setup, so that is what will be streamed */
	caps = gst_imx_v4l2src_caps_for_current_setup(v4l2src);
	if (!caps)
		return FALSE;

	GST_INFO_OBJECT(src, "negotiated caps %" GST_PTR_FORMAT, (gpointer)caps);

	ret = gst_base_src_set_caps(src, caps);
	/* gst_base_src_set_caps does not take ownership; drop our ref to
	 * avoid leaking the caps (the original code leaked it) */
	gst_caps_unref(caps);

	return ret;
}
Beispiel #16
0
/* Clock callback that polls the autofocus status.
 * Only acts if the firing clock id is still the one stored on the
 * element; a mismatch means the wait was superseded (e.g. AF was
 * cancelled and rescheduled) and this callback must do nothing. */
static gboolean gst_imx_v4l2src_af_status_cb(GstClock *clock, GstClockTime time,
		GstClockID id, gpointer user_data)
{
	GstImxV4l2VideoSrc *v4l2src = GST_IMX_V4L2SRC(user_data);

	g_mutex_lock(&v4l2src->af_mutex);

	if (v4l2src->af_clock_id == id) {
		/* take ownership back: clear the stored id before re-checking,
		 * since af_check_status may schedule a new wait */
		gst_clock_id_unref(v4l2src->af_clock_id);
		v4l2src->af_clock_id = NULL;

		gst_imx_v4l2src_af_check_status(v4l2src);
	}

	g_mutex_unlock(&v4l2src->af_mutex);
	return TRUE;
}
Beispiel #17
0
/* GstBaseSrc::negotiate vfunc.
 * Queries the driver for the negotiated V4L2 pixel format, translates it
 * to a GStreamer video format string, and pushes the resulting caps
 * downstream. Returns FALSE if the format cannot be determined. */
static gboolean gst_imx_v4l2src_negotiate(GstBaseSrc *src)
{
	GstImxV4l2VideoSrc *v4l2src = GST_IMX_V4L2SRC(src);
	GstCaps *caps;
	GstVideoFormat gst_fmt;
	const gchar *pixel_format = NULL;
	const gchar *interlace_mode = "progressive";
	struct v4l2_format fmt;
	gboolean ret;

	fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	if (ioctl(GST_IMX_FD_OBJECT_GET_FD(v4l2src->fd_obj_v4l), VIDIOC_G_FMT, &fmt) < 0) {
		GST_ERROR_OBJECT(v4l2src, "VIDIOC_G_FMT failed");
		return FALSE;
	}

	switch (fmt.fmt.pix.pixelformat) {
	case V4L2_PIX_FMT_YUV420: /* Special Case for handling YU12 */
		pixel_format = "I420";
		break;
	case V4L2_PIX_FMT_YUYV: /* Special Case for handling YUYV */
		pixel_format = "YUY2";
		break;
	default:
		gst_fmt = gst_video_format_from_fourcc(fmt.fmt.pix.pixelformat);
		/* an unrecognized fourcc yields GST_VIDEO_FORMAT_UNKNOWN;
		 * fail negotiation instead of advertising bogus caps */
		if (gst_fmt == GST_VIDEO_FORMAT_UNKNOWN) {
			GST_ERROR_OBJECT(v4l2src, "unsupported pixelformat %d",
					fmt.fmt.pix.pixelformat);
			return FALSE;
		}
		pixel_format = gst_video_format_to_string(gst_fmt);
	}

	if (fmt.fmt.pix.field == V4L2_FIELD_INTERLACED)
		interlace_mode = "interleaved";

	/* not much to negotiate;
	 * we already performed setup, so that is what will be streamed */
	caps = gst_caps_new_simple("video/x-raw",
			"format", G_TYPE_STRING, pixel_format,
			"width", G_TYPE_INT, v4l2src->capture_width,
			"height", G_TYPE_INT, v4l2src->capture_height,
			"interlace-mode", G_TYPE_STRING, interlace_mode,
			"framerate", GST_TYPE_FRACTION, v4l2src->fps_n, v4l2src->fps_d,
			"pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
			NULL);

	GST_INFO_OBJECT(src, "negotiated caps %" GST_PTR_FORMAT, (gpointer)caps);

	ret = gst_base_src_set_caps(src, caps);
	/* set_caps does not take ownership of the caps; unref to fix the
	 * leak in the original code */
	gst_caps_unref(caps);

	return ret;
}
Beispiel #18
0
/* GstPushSrc::fill vfunc: stamp the captured buffer with a running-time
 * timestamp and the per-frame duration. Falls back to frame counting
 * when no pipeline clock is available. */
static GstFlowReturn gst_imx_v4l2src_fill(GstPushSrc *src, GstBuffer *buf)
{
	GstImxV4l2VideoSrc *v4l2src = GST_IMX_V4L2SRC(src);
	GstClock *clock;
	GstClockTime ts;

	GST_LOG_OBJECT(v4l2src, "fill");

	/* guard against a NULL clock (element may not have been given one);
	 * the original code passed NULL straight to gst_clock_get_time() */
	clock = GST_ELEMENT(v4l2src)->clock;
	ts = (clock != NULL) ? gst_clock_get_time(clock) : GST_CLOCK_TIME_NONE;
	if (ts != GST_CLOCK_TIME_NONE) {
		GstClockTime base_time = gst_element_get_base_time(GST_ELEMENT(v4l2src));
		/* clamp instead of letting the unsigned subtraction wrap when
		 * the clock time is (briefly) behind the base time */
		ts = (ts > base_time) ? (ts - base_time) : 0;
	} else
		ts = v4l2src->count * v4l2src->time_per_frame;
	v4l2src->count++;

	GST_BUFFER_TIMESTAMP(buf) = ts;
	GST_BUFFER_DURATION(buf) = v4l2src->time_per_frame;
	return GST_FLOW_OK;
}
Beispiel #19
0
/* GstBaseSrc::get_caps vfunc.
 * Advertises the raw I420 capture capabilities and, per the get_caps
 * contract, intersects them with the downstream-supplied filter caps
 * when one is given. Caller owns the returned caps. */
static GstCaps *gst_imx_v4l2src_get_caps(GstBaseSrc *src, GstCaps *filter)
{
	GstImxV4l2Src *v4l2src = GST_IMX_V4L2SRC(src);
	GstCaps *caps;

	GST_INFO_OBJECT(v4l2src, "get caps filter %" GST_PTR_FORMAT, (gpointer)filter);

	caps = gst_caps_new_simple("video/x-raw",
			"format", G_TYPE_STRING, "I420",
			"width", GST_TYPE_INT_RANGE, 16, G_MAXINT,
			"height", GST_TYPE_INT_RANGE, 16, G_MAXINT,
			"framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 100, 1,
			"pixel-aspect-ratio", GST_TYPE_FRACTION_RANGE, 0, 1, 100, 1,
			NULL);

	/* the original ignored the filter, which violates the get_caps
	 * contract and can make callers renegotiate with unusable caps */
	if (filter != NULL) {
		GstCaps *intersection =
				gst_caps_intersect_full(filter, caps, GST_CAPS_INTERSECT_FIRST);
		gst_caps_unref(caps);
		caps = intersection;
	}

	GST_INFO_OBJECT(v4l2src, "get caps %" GST_PTR_FORMAT, (gpointer)caps);

	return caps;
}
Beispiel #20
0
/* GstBaseSrc::negotiate vfunc.
 * Builds fixed I420 caps from the dimensions and framerate determined
 * during capture setup and pushes them downstream. */
static gboolean gst_imx_v4l2src_negotiate(GstBaseSrc *src)
{
	GstImxV4l2Src *v4l2src = GST_IMX_V4L2SRC(src);
	GstCaps *caps;
	gboolean ret;

	/* not much to negotiate;
	 * we already performed setup, so that is what will be streamed */

	caps = gst_caps_new_simple("video/x-raw",
			"format", G_TYPE_STRING, "I420",
			"width", G_TYPE_INT, v4l2src->capture_width,
			"height", G_TYPE_INT, v4l2src->capture_height,
			"framerate", GST_TYPE_FRACTION, v4l2src->fps_n, v4l2src->fps_d,
			"pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
			NULL);

	GST_INFO_OBJECT(src, "negotiated caps %" GST_PTR_FORMAT, (gpointer)caps);

	ret = gst_base_src_set_caps(src, caps);
	/* set_caps does not consume our reference; unref to fix the leak
	 * present in the original code */
	gst_caps_unref(caps);

	return ret;
}
Beispiel #21
0
/* GObject property getter.
 * Covers both the element's own configuration properties and the
 * GstPhotography interface properties; most photography values are
 * fixed defaults since the hardware exposes no corresponding controls. */
static void gst_imx_v4l2src_get_property(GObject *object, guint prop_id,
		GValue *value, GParamSpec *pspec)
{
	GstImxV4l2VideoSrc *v4l2src = GST_IMX_V4L2SRC(object);

	switch (prop_id)
	{
		case IMX_V4L2SRC_CAPTURE_MODE:
			g_value_set_int(value, v4l2src->capture_mode);
			break;

		case IMX_V4L2SRC_FRAMERATE_NUM:
			g_value_set_int(value, v4l2src->fps_n);
			break;

		case IMX_V4L2SRC_INPUT:
			g_value_set_int(value, v4l2src->input);
			break;

		case IMX_V4L2SRC_DEVICE:
			g_value_set_string(value, v4l2src->devicename);
			break;

		case IMX_V4L2SRC_QUEUE_SIZE:
			g_value_set_int(value, v4l2src->queue_size);
			break;

		case IMX_V4L2SRC_CROP_META_X:
			g_value_set_int(value, v4l2src->metaCropX);
			break;

		case IMX_V4L2SRC_CROP_META_Y:
			g_value_set_int(value, v4l2src->metaCropY);
			break;

		case IMX_V4L2SRC_CROP_META_WIDTH:
			g_value_set_int(value, v4l2src->metaCropWidth);
			break;

		case IMX_V4L2SRC_CROP_META_HEIGHT:
			g_value_set_int(value, v4l2src->metaCropHeight);
			break;

		case PROP_FOCUS_MODE:
			{
				GstPhotographyFocusMode focus_mode;
				gst_imx_v4l2src_get_focus_mode(GST_PHOTOGRAPHY(v4l2src), &focus_mode);
				g_value_set_enum(value, focus_mode);
			}
			break;

		case PROP_WB_MODE:
			g_value_set_enum(value, GST_PHOTOGRAPHY_WB_MODE_AUTO);
			break;

		case PROP_COLOR_TONE:
			g_value_set_enum(value, GST_PHOTOGRAPHY_COLOR_TONE_MODE_NORMAL);
			break;

		case PROP_SCENE_MODE:
			/* bug fix: the original stored the GType macro
			 * GST_TYPE_PHOTOGRAPHY_SCENE_MODE as the enum *value*;
			 * report the AUTO scene mode instead */
			g_value_set_enum(value, GST_PHOTOGRAPHY_SCENE_MODE_AUTO);
			break;

		case PROP_FLASH_MODE:
			g_value_set_enum(value, GST_PHOTOGRAPHY_FLASH_MODE_AUTO);
			break;

		case PROP_FLICKER_MODE:
			g_value_set_enum(value, GST_PHOTOGRAPHY_FLICKER_REDUCTION_OFF);
			break;

		case PROP_CAPABILITIES:
			g_value_set_ulong(value, GST_PHOTOGRAPHY_CAPS_NONE);
			break;

		case PROP_EV_COMP:
			g_value_set_float(value, 0.0f);
			break;

		case PROP_ISO_SPEED:
			g_value_set_uint(value, 0);
			break;

		case PROP_APERTURE:
			g_value_set_uint(value, 0);
			break;

		case PROP_EXPOSURE_TIME:
			g_value_set_uint(value, 0);
			break;

		case PROP_IMAGE_CAPTURE_SUPPORTED_CAPS:
		case PROP_IMAGE_PREVIEW_SUPPORTED_CAPS:
			if (v4l2src->fd_obj_v4l) {
				GstCaps *caps = gst_imx_v4l2src_caps_for_current_setup(v4l2src);
				if (caps != NULL) {
					gst_value_set_caps(value, caps);
					/* gst_value_set_caps takes its own reference;
					 * drop ours to fix the leak in the original */
					gst_caps_unref(caps);
				}
			} else
				GST_DEBUG_OBJECT(v4l2src, "not connected to hardware, don't know supported caps");
			break;

		case PROP_ZOOM:
			g_value_set_float(value, 1.0f);
			break;

		case PROP_COLOR_TEMPERATURE:
			g_value_set_uint(value, 0);
			break;

		case PROP_WHITE_POINT:
			g_value_set_boxed(value, NULL);
			break;

		case PROP_ANALOG_GAIN:
			g_value_set_float(value, 1.0f);
			break;

		case PROP_LENS_FOCUS:
			g_value_set_float(value, 0.0f);
			break;

		case PROP_MIN_EXPOSURE_TIME:
			g_value_set_uint(value, 0);
			break;

		case PROP_MAX_EXPOSURE_TIME:
			g_value_set_uint(value, 0);
			break;

		case PROP_NOISE_REDUCTION:
			g_value_set_flags(value, 0);
			break;

		default:
			G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
			break;
	}
}
Beispiel #22
0
/* GObject property setter.
 * Element configuration properties are stored directly; of the
 * GstPhotography properties only focus-mode is writable, the rest are
 * rejected with a warning. */
static void gst_imx_v4l2src_set_property(GObject *object, guint prop_id,
		const GValue *value, GParamSpec *pspec)
{
	GstImxV4l2VideoSrc *v4l2src = GST_IMX_V4L2SRC(object);

	switch (prop_id)
	{
		case IMX_V4L2SRC_CAPTURE_MODE:
			v4l2src->capture_mode = g_value_get_int(value);
			break;

		case IMX_V4L2SRC_FRAMERATE_NUM:
			v4l2src->fps_n = g_value_get_int(value);
			break;

		case IMX_V4L2SRC_INPUT:
			v4l2src->input = g_value_get_int(value);
			break;

		case IMX_V4L2SRC_DEVICE:
			/* g_free(NULL) is a no-op; no guard needed before
			 * replacing the previous device name */
			g_free(v4l2src->devicename);
			v4l2src->devicename = g_strdup(g_value_get_string(value));
			break;

		case IMX_V4L2SRC_QUEUE_SIZE:
			v4l2src->queue_size = g_value_get_int(value);
			break;

		case IMX_V4L2SRC_CROP_META_X:
			v4l2src->metaCropX = g_value_get_int(value);
			break;

		case IMX_V4L2SRC_CROP_META_Y:
			v4l2src->metaCropY = g_value_get_int(value);
			break;

		case IMX_V4L2SRC_CROP_META_WIDTH:
			v4l2src->metaCropWidth = g_value_get_int(value);
			break;

		case IMX_V4L2SRC_CROP_META_HEIGHT:
			v4l2src->metaCropHeight = g_value_get_int(value);
			break;

		case PROP_FOCUS_MODE:
			gst_imx_v4l2src_set_focus_mode(GST_PHOTOGRAPHY(v4l2src), g_value_get_enum(value));
			break;

		case PROP_WB_MODE:
		case PROP_COLOR_TONE:
		case PROP_SCENE_MODE:
		case PROP_FLASH_MODE:
		case PROP_FLICKER_MODE:
		case PROP_CAPABILITIES:
		case PROP_EV_COMP:
		case PROP_ISO_SPEED:
		case PROP_APERTURE:
		case PROP_EXPOSURE_TIME:
		case PROP_IMAGE_CAPTURE_SUPPORTED_CAPS:
		case PROP_IMAGE_PREVIEW_SUPPORTED_CAPS:
		case PROP_ZOOM:
		case PROP_COLOR_TEMPERATURE:
		case PROP_WHITE_POINT:
		case PROP_ANALOG_GAIN:
		case PROP_LENS_FOCUS:
		case PROP_MIN_EXPOSURE_TIME:
		case PROP_MAX_EXPOSURE_TIME:
		case PROP_NOISE_REDUCTION:
			GST_WARNING_OBJECT(v4l2src, "setting GstPhotography properties is not supported");
			break;

		default:
			G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
			break;
	}
}