Пример #1
0
/* stop vfunc: streams off the V4L2 capture device, releases every buffer
 * still tracked by the pool, and chains up to the parent class.
 *
 * Fix: the original returned immediately when VIDIOC_STREAMOFF failed,
 * leaking the pool->buffers array, every tracked buffer, and skipping the
 * parent stop(). Teardown now always runs to completion; the ioctl failure
 * is reported via the FALSE return value instead.
 */
static gboolean gst_imx_v4l2_buffer_pool_stop(GstBufferPool *bpool)
{
	GstImxV4l2BufferPool *pool = GST_IMX_V4L2_BUFFER_POOL(bpool);
	enum v4l2_buf_type type;
	guint i;
	gboolean ret = TRUE;

	GST_DEBUG_OBJECT(pool, "stop");

	type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	if (ioctl(GST_IMX_FD_OBJECT_GET_FD(pool->fd_obj_v4l), VIDIOC_STREAMOFF, &type) < 0)
	{
		GST_ERROR_OBJECT(pool, "VIDIOC_STREAMOFF error: %s", g_strerror(errno));
		/* do not return yet: buffers below still have to be released */
		ret = FALSE;
	}

	/* hand every still-tracked buffer back to the parent class */
	if (pool->buffers != NULL)
	{
		for (i = 0; i < pool->num_buffers; i++)
		{
			if (pool->buffers[i])
			{
				GST_BUFFER_POOL_CLASS(gst_imx_v4l2_buffer_pool_parent_class)->release_buffer(bpool, pool->buffers[i]);
				pool->buffers[i] = NULL;
			}
		}
		g_free(pool->buffers);
		pool->buffers = NULL;
	}

	if (!GST_BUFFER_POOL_CLASS(gst_imx_v4l2_buffer_pool_parent_class)->stop(bpool))
		ret = FALSE;

	return ret;
}
Пример #2
0
/* start vfunc: opens/configures the capture device via capture_setup(),
 * wraps the resulting fd in an fd object, reads back the active format for
 * logging, and derives the per-frame duration from fps_n/fps_d.
 *
 * NOTE(review): on the VIDIOC_G_FMT failure path the freshly created
 * fd_obj_v4l is not released before returning FALSE — looks like a leak;
 * confirm against the fd object's ownership rules.
 */
static gboolean gst_imx_v4l2src_start(GstBaseSrc *src)
{
	GstImxV4l2VideoSrc *v4l2src = GST_IMX_V4L2SRC(src);
	struct v4l2_format fmt;
	int fd;

	GST_LOG_OBJECT(v4l2src, "start");

	fd = gst_imx_v4l2src_capture_setup(v4l2src);
	if (fd < 0)
	{
		GST_ERROR_OBJECT(v4l2src, "capture_setup failed");
		return FALSE;
	}

	v4l2src->fd_obj_v4l = gst_fd_object_new(fd);

	fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	if (ioctl(GST_IMX_FD_OBJECT_GET_FD(v4l2src->fd_obj_v4l), VIDIOC_G_FMT, &fmt) < 0)
	{
		GST_ERROR_OBJECT(v4l2src, "VIDIOC_G_FMT failed");
		return FALSE;
	}

	GST_DEBUG_OBJECT(v4l2src, "width = %d", fmt.fmt.pix.width);
	GST_DEBUG_OBJECT(v4l2src, "height = %d", fmt.fmt.pix.height);
	GST_DEBUG_OBJECT(v4l2src, "sizeimage = %d", fmt.fmt.pix.sizeimage);
	GST_DEBUG_OBJECT(v4l2src, "pixelformat = %d", fmt.fmt.pix.pixelformat);

	/* frame duration = GST_SECOND * fps_d / fps_n */
	v4l2src->time_per_frame = gst_util_uint64_scale_int(GST_SECOND,
			v4l2src->fps_d, v4l2src->fps_n);
	v4l2src->count = 0;

	return TRUE;
}
Пример #3
0
/* start vfunc: allocates the buffer tracking array, lets the parent class
 * preallocate the pool buffers, then starts streaming (VIDIOC_STREAMON).
 *
 * Fix: the original leaked the freshly allocated pool->buffers array when
 * the parent-class start() failed; it is now freed and reset on that path.
 */
static gboolean gst_imx_v4l2_buffer_pool_start(GstBufferPool *bpool)
{
	GstImxV4l2BufferPool *pool = GST_IMX_V4L2_BUFFER_POOL(bpool);
	enum v4l2_buf_type type;

	GST_DEBUG_OBJECT(pool, "start");

	pool->buffers = g_new0(GstBuffer *, pool->num_buffers);
	pool->num_allocated = 0;

	if (!GST_BUFFER_POOL_CLASS(gst_imx_v4l2_buffer_pool_parent_class)->start(bpool))
	{
		GST_ERROR_OBJECT(pool, "failed to allocate start buffers");
		g_free(pool->buffers);
		pool->buffers = NULL;
		return FALSE;
	}

	type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	if (ioctl(GST_IMX_FD_OBJECT_GET_FD(pool->fd_obj_v4l), VIDIOC_STREAMON, &type) < 0)
	{
		/* NOTE(review): buffers preallocated by the parent class are not
		 * released here; stop() is expected to clean them up — confirm
		 * the element always calls stop() after a failed start() */
		GST_ERROR_OBJECT(pool, "VIDIOC_STREAMON error: %s",
				g_strerror(errno));
		return FALSE;
	}

	return TRUE;
}
Пример #4
0
/* decide_allocation vfunc: answers the allocation query with a buffer pool.
 *
 * The buffer size is read back from the driver (VIDIOC_G_FMT sizeimage) so
 * negotiated buffers match what the hardware will actually produce. An
 * already-active pool from the base source is reused, since creating a new
 * one would trigger a pool stop/start cycle that streams the device off.
 */
static gboolean gst_imx_v4l2src_decide_allocation(GstBaseSrc *bsrc,
		GstQuery *query)
{
	GstImxV4l2Src *v4l2src = GST_IMX_V4L2SRC(bsrc);
	struct v4l2_format fmt;
	GstBufferPool *pool;
	guint size, min, max;
	gboolean update_pool;
	GstStructure *config;
	GstCaps *caps;

	gst_query_parse_allocation(query, &caps, NULL);

	/* start from the downstream proposal when there is one */
	update_pool = gst_query_get_n_allocation_pools(query) > 0;
	if (update_pool)
		gst_query_parse_nth_allocation_pool(query, 0, NULL, NULL,
				&min, &max);
	else
		min = max = 0;

	/* one extra buffer to keep capturing while the others travel
	 * downstream; otherwise fall back to the configured queue size */
	min = (min != 0) ? (min + 1) : v4l2src->queue_size;

	fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	if (ioctl(GST_IMX_FD_OBJECT_GET_FD(v4l2src->fd_obj_v4l), VIDIOC_G_FMT, &fmt) < 0)
	{
		GST_ERROR_OBJECT(v4l2src, "VIDIOC_G_FMT failed");
		return FALSE;
	}

	size = fmt.fmt.pix.sizeimage;

	/* no repooling; leads to stream off situation due to pool start/stop */
	pool = gst_base_src_get_buffer_pool(bsrc);
	if (pool == NULL)
	{
		pool = gst_imx_v4l2_buffer_pool_new(v4l2src->fd_obj_v4l);
		config = gst_buffer_pool_get_config(pool);
		gst_buffer_pool_config_set_params(config, caps, size, min, max);
		gst_buffer_pool_config_add_option(config, GST_BUFFER_POOL_OPTION_VIDEO_META);
		gst_buffer_pool_set_config(pool, config);
	}

	if (update_pool)
		gst_query_set_nth_allocation_pool(query, 0, pool, size, min, max);
	else
		gst_query_add_allocation_pool(query, pool, size, min, max);

	gst_object_unref(pool);

	return TRUE;
}
Пример #5
0
/* set_config vfunc: validates the pool configuration, requests the buffers
 * from the driver (VIDIOC_REQBUFS, MMAP memory), and stores the resulting
 * buffer count and video info on the pool.
 *
 * Fix: the debug message printed a gsize and two guints with "%d" — a
 * printf format/argument mismatch (undefined behavior, and wrong output on
 * LP64 where gsize is 64-bit). Now uses G_GSIZE_FORMAT and "%u".
 *
 * Note that the driver may grant a different buffer count than requested;
 * the config is rewritten with the granted count before chaining up.
 */
static gboolean gst_imx_v4l2_buffer_pool_set_config(GstBufferPool *bpool, GstStructure *config)
{
	GstImxV4l2BufferPool *pool = GST_IMX_V4L2_BUFFER_POOL(bpool);
	GstVideoInfo info;
	GstCaps *caps;
	gsize size;
	guint min, max;
	struct v4l2_requestbuffers req;

	if (!gst_buffer_pool_config_get_params(config, &caps, &size, &min, &max))
	{
		GST_ERROR_OBJECT(pool, "pool configuration invalid");
		return FALSE;
	}

	if (caps == NULL)
	{
		GST_ERROR_OBJECT(pool, "configuration contains no caps");
		return FALSE;
	}

	if (!gst_video_info_from_caps(&info, caps))
	{
		GST_ERROR_OBJECT(pool, "caps cannot be parsed for video info");
		return FALSE;
	}

	GST_DEBUG_OBJECT(pool, "set_config: size %" G_GSIZE_FORMAT ", min %u, max %u",
			size, min, max);

	memset(&req, 0, sizeof(req));
	req.count = min;
	req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	req.memory = V4L2_MEMORY_MMAP;
	if (ioctl(GST_IMX_FD_OBJECT_GET_FD(pool->fd_obj_v4l), VIDIOC_REQBUFS, &req) < 0)
	{
		GST_ERROR_OBJECT(pool, "VIDIOC_REQBUFS failed: %s",
				g_strerror(errno));
		return FALSE;
	}

	/* the driver may round the buffer count up or down */
	if (req.count != min)
	{
		min = req.count;
		GST_WARNING_OBJECT(pool, "using %u buffers", min);
	}

	pool->num_buffers = min;
	pool->video_info = info;
	pool->add_videometa = gst_buffer_pool_config_has_option(config, GST_BUFFER_POOL_OPTION_VIDEO_META);

	gst_buffer_pool_config_set_params(config, caps, size, min, max);

	return GST_BUFFER_POOL_CLASS(gst_imx_v4l2_buffer_pool_parent_class)->set_config(bpool, config);
}
Пример #6
0
/* Builds caps describing the format the driver is currently configured
 * with (VIDIOC_G_FMT), including a TV-in fix-up for drivers that report
 * field == 0 despite producing interlaced frames.
 *
 * Fix: an unrecognized fourcc made gst_video_format_from_fourcc() return
 * GST_VIDEO_FORMAT_UNKNOWN, whose string was then placed into the caps
 * (gst_video_format_to_string may even return NULL). Such formats are now
 * rejected with an error and a NULL return, matching the VIDIOC_G_FMT
 * error path.
 *
 * Returns newly allocated caps (caller owns the reference), or NULL.
 */
static GstCaps *gst_imx_v4l2src_caps_for_current_setup(GstImxV4l2VideoSrc *v4l2src)
{
	GstVideoFormat gst_fmt;
	const gchar *pixel_format = NULL;
	const gchar *interlace_mode = "progressive";
	struct v4l2_format fmt;

	fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	if (ioctl(GST_IMX_FD_OBJECT_GET_FD(v4l2src->fd_obj_v4l), VIDIOC_G_FMT, &fmt) < 0)
	{
		GST_ERROR_OBJECT(v4l2src, "VIDIOC_G_FMT failed");
		return NULL;
	}

	switch (fmt.fmt.pix.pixelformat)
	{
		case V4L2_PIX_FMT_YUV420: /* Special Case for handling YU12 */
			pixel_format = "I420";
			break;
		case V4L2_PIX_FMT_YUYV: /* Special Case for handling YUYV */
			pixel_format = "YUY2";
			break;
		default:
			gst_fmt = gst_video_format_from_fourcc(fmt.fmt.pix.pixelformat);
			if (gst_fmt == GST_VIDEO_FORMAT_UNKNOWN)
			{
				GST_ERROR_OBJECT(v4l2src, "unsupported pixel format");
				return NULL;
			}
			pixel_format = gst_video_format_to_string(gst_fmt);
	}

	if (v4l2src->is_tvin && !fmt.fmt.pix.field)
	{
		fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;

		GST_DEBUG_OBJECT(v4l2src, "TV decoder fix up: field = V4L2_FIELD_INTERLACED");
	}

	if (fmt.fmt.pix.field == V4L2_FIELD_INTERLACED ||
		fmt.fmt.pix.field == V4L2_FIELD_INTERLACED_TB ||
		fmt.fmt.pix.field == V4L2_FIELD_INTERLACED_BT)
	{
		interlace_mode = "interleaved";
	}

	return gst_caps_new_simple("video/x-raw",
			"format", G_TYPE_STRING, pixel_format,
			"width", G_TYPE_INT, v4l2src->capture_width,
			"height", G_TYPE_INT, v4l2src->capture_height,
			"interlace-mode", G_TYPE_STRING, interlace_mode,
			"framerate", GST_TYPE_FRACTION, v4l2src->fps_n, v4l2src->fps_d,
			"pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
			NULL);
}
Пример #7
0
/* Reads a single V4L2 control (VIDIOC_G_CTRL).
 * On success *value receives the control's value; on failure *value is
 * left untouched. Returns the raw ioctl result (negative on failure). */
static int v4l2_g_ctrl(GstImxV4l2VideoSrc *v4l2src, int id, int *value)
{
	struct v4l2_control control = { .id = id };
	int err;

	err = ioctl(GST_IMX_FD_OBJECT_GET_FD(v4l2src->fd_obj_v4l), VIDIOC_G_CTRL, &control);
	if (err < 0)
	{
		GST_LOG_OBJECT(v4l2src, "VIDIOC_G_CTRL(%s) failed", ctrl_name(id));
		return err;
	}

	GST_LOG_OBJECT(v4l2src, "VIDIOC_G_CTRL(%s) returned %d", ctrl_name(id), control.value);
	*value = control.value;

	return err;
}
Пример #8
0
/* negotiate vfunc: builds caps from the format the driver is actually
 * configured with (VIDIOC_G_FMT) and sets them on the source.
 *
 * Fix: gst_base_src_set_caps() does not take ownership of its caps
 * argument (transfer none), so the caps created here leaked one reference
 * per (re)negotiation. They are now unreffed after use.
 */
static gboolean gst_imx_v4l2src_negotiate(GstBaseSrc *src)
{
	GstImxV4l2VideoSrc *v4l2src = GST_IMX_V4L2SRC(src);
	GstCaps *caps;
	gboolean ret;
	GstVideoFormat gst_fmt;
	const gchar *pixel_format = NULL;
	const gchar *interlace_mode = "progressive";
	struct v4l2_format fmt;

	fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	if (ioctl(GST_IMX_FD_OBJECT_GET_FD(v4l2src->fd_obj_v4l), VIDIOC_G_FMT, &fmt) < 0) {
		GST_ERROR_OBJECT(v4l2src, "VIDIOC_G_FMT failed");
		return FALSE;
	}

	switch (fmt.fmt.pix.pixelformat) {
	case V4L2_PIX_FMT_YUV420: /* Special Case for handling YU12 */
		pixel_format = "I420";
		break;
	case V4L2_PIX_FMT_YUYV: /* Special Case for handling YUYV */
		pixel_format = "YUY2";
		break;
	default:
		gst_fmt = gst_video_format_from_fourcc(fmt.fmt.pix.pixelformat);
		pixel_format = gst_video_format_to_string(gst_fmt);
	}

	if (fmt.fmt.pix.field == V4L2_FIELD_INTERLACED)
		interlace_mode = "interleaved";

	/* not much to negotiate;
	 * we already performed setup, so that is what will be streamed */
	caps = gst_caps_new_simple("video/x-raw",
			"format", G_TYPE_STRING, pixel_format,
			"width", G_TYPE_INT, v4l2src->capture_width,
			"height", G_TYPE_INT, v4l2src->capture_height,
			"interlace-mode", G_TYPE_STRING, interlace_mode,
			"framerate", GST_TYPE_FRACTION, v4l2src->fps_n, v4l2src->fps_d,
			"pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
			NULL);

	GST_INFO_OBJECT(src, "negotiated caps %" GST_PTR_FORMAT, (gpointer)caps);

	ret = gst_base_src_set_caps(src, caps);
	gst_caps_unref(caps);

	return ret;
}
Пример #9
0
/* Sets a single V4L2 control (VIDIOC_S_CTRL) and logs the outcome.
 * Returns the raw ioctl result (negative on failure). */
static inline int v4l2_s_ctrl(GstImxV4l2VideoSrc *v4l2src, int id, int value)
{
	struct v4l2_control control = { .id = id, .value = value };
	int err;

	GST_LOG_OBJECT(v4l2src, "VIDIOC_S_CTRL(%s, %d)", ctrl_name(id), value);

	err = ioctl(GST_IMX_FD_OBJECT_GET_FD(v4l2src->fd_obj_v4l), VIDIOC_S_CTRL, &control);
	if (err < 0)
		GST_LOG_OBJECT(v4l2src, "VIDIOC_S_CTRL(%s, %d) failed", ctrl_name(id), value);
	else
		GST_LOG_OBJECT(v4l2src, "VIDIOC_S_CTRL(%s, %d) succeed", ctrl_name(id), value);

	return err;
}
Пример #10
0
/* acquire_buffer vfunc: dequeues the next filled capture buffer from the
 * V4L2 device (VIDIOC_DQBUF) and hands it out to the caller.
 *
 * The returned buffer is the pool-owned GstBuffer that was queued at
 * vbuffer.index; it is removed from pool->buffers tracking until
 * release_buffer() requeues it. The buffer's memory is re-wrapped on every
 * acquire so the size reflects the driver-reported bytesused.
 *
 * Returns GST_FLOW_FLUSHING while the pool is flushing, GST_FLOW_ERROR if
 * the dequeue ioctl fails, GST_FLOW_OK otherwise.
 */
static GstFlowReturn gst_imx_v4l2_buffer_pool_acquire_buffer(GstBufferPool *bpool, GstBuffer **buffer, G_GNUC_UNUSED GstBufferPoolAcquireParams *params)
{
	GstImxV4l2BufferPool *pool = GST_IMX_V4L2_BUFFER_POOL(bpool);
	struct v4l2_buffer vbuffer;
	GstBuffer *buf;
	GstImxV4l2Meta *meta;

	if (GST_BUFFER_POOL_IS_FLUSHING(bpool))
		return GST_FLOW_FLUSHING;

	/* the driver fills in .index/.bytesused/.timestamp on DQBUF */
	memset(&vbuffer, 0, sizeof(vbuffer));
	vbuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	vbuffer.memory = V4L2_MEMORY_MMAP;

	if (ioctl(GST_IMX_FD_OBJECT_GET_FD(pool->fd_obj_v4l), VIDIOC_DQBUF, &vbuffer) < 0)
	{
		GST_ERROR_OBJECT(pool, "VIDIOC_DQBUF failed: %s", g_strerror(errno));
		return GST_FLOW_ERROR;
	}
	buf = pool->buffers[vbuffer.index];
	GST_DEBUG_OBJECT(pool, "dqbuf %u %p", vbuffer.index, (gpointer)buf);
	/* untrack: the buffer is outside the pool until it is released again */
	pool->buffers[vbuffer.index] = NULL;

	g_assert(buf);

	meta = GST_IMX_V4L2_META_GET(buf);
	g_assert(meta);

	/* replace the buffer's memory with a wrapper over the mmap'ed region,
	 * sized to the bytes the driver actually produced this frame */
	gst_buffer_remove_all_memory(buf);
	gst_buffer_append_memory(buf,
			gst_memory_new_wrapped(0,
				meta->mem, meta->vbuffer.length, 0,
				vbuffer.bytesused, NULL, NULL));

	GST_BUFFER_TIMESTAMP(buf) = GST_TIMEVAL_TO_TIME(vbuffer.timestamp);

	*buffer = buf;

	return GST_FLOW_OK;
}
Пример #11
0
/* release_buffer vfunc: requeues a pool buffer to the driver (VIDIOC_QBUF)
 * and tracks it again in pool->buffers.
 *
 * Buffers without our meta are copies made elsewhere; they are simply
 * unreffed.
 *
 * Fix: the QBUF error was logged with plain GST_ERROR, unlike every other
 * message in this pool which uses the *_OBJECT variants; the pool instance
 * is now included in the log line.
 */
static void gst_imx_v4l2_buffer_pool_release_buffer(GstBufferPool *bpool, GstBuffer *buf)
{
	GstImxV4l2BufferPool *pool = GST_IMX_V4L2_BUFFER_POOL(bpool);
	GstImxV4l2Meta *meta;

	meta = GST_IMX_V4L2_META_GET(buf);
	if (!meta)
	{
		GST_DEBUG_OBJECT(pool, "unref copied buffer %p", (gpointer)buf);
		gst_buffer_unref(buf);
		return;
	}

	GST_DEBUG_OBJECT(pool, "qbuf %u %p", meta->vbuffer.index, (gpointer)buf);

	if (ioctl(GST_IMX_FD_OBJECT_GET_FD(pool->fd_obj_v4l), VIDIOC_QBUF, &meta->vbuffer) < 0)
	{
		/* NOTE(review): on failure the buffer is neither requeued nor
		 * tracked in pool->buffers — it is effectively dropped; confirm
		 * whether an unref is required here */
		GST_ERROR_OBJECT(pool, "VIDIOC_QBUF error: %s",
				g_strerror(errno));
		return;
	}
	pool->buffers[meta->vbuffer.index] = buf;
}
Пример #12
0
/* alloc_buffer vfunc: creates one pool buffer backed by a driver-allocated
 * MMAP region (VIDIOC_QUERYBUF + mmap), records its physical address, and
 * optionally attaches video / Vivante upload metas.
 *
 * Fix: mmap() reports failure by returning MAP_FAILED ((void *)-1), never
 * NULL, so the original g_assert(meta->mem) could not catch a failed
 * mapping and the code would later write through an invalid pointer. The
 * result is now checked properly and turned into GST_FLOW_ERROR.
 */
static GstFlowReturn gst_imx_v4l2_buffer_pool_alloc_buffer(GstBufferPool *bpool, GstBuffer **buffer, G_GNUC_UNUSED GstBufferPoolAcquireParams *params)
{
	GstImxV4l2BufferPool *pool = GST_IMX_V4L2_BUFFER_POOL(bpool);
	GstBuffer *buf;
	GstImxV4l2Meta *meta;
	GstImxPhysMemMeta *phys_mem_meta;
	GstVideoInfo *info;

	buf = gst_buffer_new();
	if (buf == NULL)
	{
		GST_ERROR_OBJECT(pool, "could not create new buffer");
		return GST_FLOW_ERROR;
	}

	GST_DEBUG_OBJECT(pool, "alloc %u %p", pool->num_allocated, (gpointer)buf);

	meta = GST_IMX_V4L2_META_ADD(buf);
	meta->vbuffer.index = pool->num_allocated;
	meta->vbuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	meta->vbuffer.memory = V4L2_MEMORY_MMAP;

	if (ioctl(GST_IMX_FD_OBJECT_GET_FD(pool->fd_obj_v4l), VIDIOC_QUERYBUF, &meta->vbuffer) < 0)
	{
		GST_ERROR_OBJECT(pool, "VIDIOC_QUERYBUF error: %s",
				g_strerror(errno));
		gst_buffer_unref(buf);
		return GST_FLOW_ERROR;
	}

	meta->mem = mmap(NULL, meta->vbuffer.length,
			PROT_READ | PROT_WRITE, MAP_SHARED, GST_IMX_FD_OBJECT_GET_FD(pool->fd_obj_v4l),
			meta->vbuffer.m.offset);
	if (meta->mem == MAP_FAILED)
	{
		/* mmap returns MAP_FAILED, not NULL, on error */
		GST_ERROR_OBJECT(pool, "mmap failed: %s", g_strerror(errno));
		meta->mem = NULL;
		gst_buffer_unref(buf);
		return GST_FLOW_ERROR;
	}

	/* Need to query twice to get the physical address */
	if (ioctl(GST_IMX_FD_OBJECT_GET_FD(pool->fd_obj_v4l), VIDIOC_QUERYBUF, &meta->vbuffer) < 0)
	{
		/* NOTE(review): the mmap'ed region is presumably unmapped by the
		 * meta's free function when the buffer is unreffed — confirm */
		GST_ERROR_OBJECT(pool, "VIDIOC_QUERYBUF for physical address error: %s",
				g_strerror(errno));
		gst_buffer_unref(buf);
		return GST_FLOW_ERROR;
	}

	phys_mem_meta = GST_IMX_PHYS_MEM_META_ADD(buf);
	phys_mem_meta->phys_addr = meta->vbuffer.m.offset;

	/* Safeguard to catch data loss if in any future i.MX version the types do not match */
	g_assert(meta->vbuffer.m.offset == (__u32)(phys_mem_meta->phys_addr));

	if (pool->add_videometa)
	{
		info = &pool->video_info;

		gst_buffer_add_video_meta_full(
				buf,
				GST_VIDEO_FRAME_FLAG_NONE,
				GST_VIDEO_INFO_FORMAT(info),
				GST_VIDEO_INFO_WIDTH(info),
				GST_VIDEO_INFO_HEIGHT(info),
				GST_VIDEO_INFO_N_PLANES(info),
				info->offset,
				info->stride
				);
	}

#ifdef HAVE_VIV_UPLOAD
	if (pool->add_vivuploadmeta)
	{
		gst_imx_buffer_add_vivante_gl_texture_upload_meta(buf);
	}
#endif

	pool->num_allocated++;

	*buffer = buf;

	return GST_FLOW_OK;
}