Example #1
static void video_stop_streaming(struct vb2_queue *q)
{
	struct camss_video *video = vb2_get_drv_priv(q);
	struct video_device *vdev = &video->vdev;
	struct media_entity *entity;
	struct media_pad *pad;
	struct v4l2_subdev *subdev;

	entity = &vdev->entity;
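	/* Walk upstream from the video node and stop each subdev. */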
	while (1) {
		pad = &entity->pads[0];
		if (!(pad->flags & MEDIA_PAD_FL_SINK))
			break;

		pad = media_entity_remote_pad(pad);
		if (!pad || !is_media_entity_v4l2_subdev(pad->entity))
			break;

		entity = pad->entity;
		subdev = media_entity_to_v4l2_subdev(entity);

		v4l2_subdev_call(subdev, video, s_stream, 0);
	}

	media_pipeline_stop(&vdev->entity);

	video->ops->flush_buffers(video, VB2_BUF_STATE_ERROR);
}
Example #2
static int __find_timeperframe(struct media_entity_pad *pad,
			       struct v4l2_subdev *subdev,
			       struct v4l2_subdev_frame_interval *fi,
			       int depth)
{
	int err;
	unsigned int i;

	/* max depth. if this is reached, then bail out of the search */
	if (depth > 16)
		return -EINVAL;

	err = v4l2_subdev_call(subdev, video, g_frame_interval, fi);
	if (err < 0) {
		/*
		 * Trace backwards through the pipeline to find a subdev
		 * with the frame interval set.
		 */
		struct media_entity_pad *_pad;
		struct v4l2_subdev *_subdev;

		for (i = 0; i < pad->entity->num_pads; ++i) {
			_pad = pad->entity->pads + i;
			if (i == pad->index || _pad->type != MEDIA_PAD_TYPE_INPUT)
				continue;
			_pad = media_entity_remote_pad(_pad);
			if (!_pad || _pad->entity->type != MEDIA_ENTITY_TYPE_SUBDEV)
				continue;
			_subdev = media_entity_to_v4l2_subdev(_pad->entity);
			err = __find_timeperframe(_pad, _subdev, fi, depth + 1);
			if (err < 0)
				continue;
			return err;
		}
		/* search failed to find any frame intervals */
		return -EINVAL;
	}

	return err;
}
Example #3
/* get the format set at output pad of the adjacent subdev */
static int
__vpfe_video_get_format(struct vpfe_video_device *video,
			struct v4l2_format *format)
{
	struct v4l2_subdev_format fmt;
	struct v4l2_subdev *subdev;
	struct media_pad *remote;
	u32 pad;
	int ret;

	subdev = vpfe_video_remote_subdev(video, &pad);
	if (subdev == NULL)
		return -EINVAL;

	fmt.which = V4L2_SUBDEV_FORMAT_ACTIVE;
	remote = media_entity_remote_pad(&video->pad);
	fmt.pad = remote->index;

	ret = v4l2_subdev_call(subdev, pad, get_fmt, NULL, &fmt);
	if (ret == -ENOIOCTLCMD)
		return -EINVAL;

	format->type = video->type;
	/* convert mbus_format to v4l2_format */
	v4l2_fill_pix_format(&format->fmt.pix, &fmt.format);
	mbus_to_pix(&fmt.format, &format->fmt.pix);

	return 0;
}
Example #4
/**
 * xvip_pipeline_start_stop - Start or stop streaming on a pipeline
 * @pipe: The pipeline
 * @start: Start (when true) or stop (when false) the pipeline
 *
 * Walk the entities chain starting at the pipeline output video node and start
 * or stop all of them.
 *
 * Return: 0 if successful, or the return value of the failed video::s_stream
 * operation otherwise.
 */
static int xvip_pipeline_start_stop(struct xvip_pipeline *pipe, bool start)
{
	struct xvip_dma *dma = pipe->output;
	struct media_entity *entity;
	struct media_pad *pad;
	struct v4l2_subdev *subdev;
	int ret;

	entity = &dma->video.entity;
	pad = NULL;

	while (1) {
		pad = xvip_get_entity_sink(entity, pad);
		if (pad == NULL)
			break;

		if (!(pad->flags & MEDIA_PAD_FL_SINK))
			break;

		pad = media_entity_remote_pad(pad);
		if (pad == NULL ||
		    media_entity_type(pad->entity) != MEDIA_ENT_T_V4L2_SUBDEV)
			break;

		entity = pad->entity;
		subdev = media_entity_to_v4l2_subdev(entity);

		ret = v4l2_subdev_call(subdev, video, s_stream, start);
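		/* When stopping, ignore errors and keep walking the chain. */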
		if (start && ret < 0 && ret != -ENOIOCTLCMD)
			return ret;
	}

	return 0;
}
Example #5
/**
 * fimc_pipeline_prepare - update pipeline information with subdevice pointers
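 * @p: fimc pipeline to populate with subdev pointers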
 * @me: media entity terminating the pipeline
 *
 * Caller holds the graph mutex.
 */
static void fimc_pipeline_prepare(struct fimc_pipeline *p,
					struct media_entity *me)
{
	struct fimc_md *fmd = entity_to_fimc_mdev(me);
	struct v4l2_subdev *sd;
	struct v4l2_subdev *sensor = NULL;
	int i;

	for (i = 0; i < IDX_MAX; i++)
		p->subdevs[i] = NULL;

	while (1) {
		struct media_pad *pad = NULL;

		/* Find remote source pad */
		for (i = 0; i < me->num_pads; i++) {
			struct media_pad *spad = &me->pads[i];
			if (!(spad->flags & MEDIA_PAD_FL_SINK))
				continue;
			pad = media_entity_remote_pad(spad);
			if (pad)
				break;
		}

		if (pad == NULL ||
		    media_entity_type(pad->entity) != MEDIA_ENT_T_V4L2_SUBDEV)
			break;
		sd = media_entity_to_v4l2_subdev(pad->entity);

		switch (sd->grp_id) {
		case GRP_ID_SENSOR:
			sensor = sd;
			/* fall through */
		case GRP_ID_FIMC_IS_SENSOR:
			p->subdevs[IDX_SENSOR] = sd;
			break;
		case GRP_ID_CSIS:
			p->subdevs[IDX_CSIS] = sd;
			break;
		case GRP_ID_FLITE:
			p->subdevs[IDX_FLITE] = sd;
			break;
		case GRP_ID_FIMC:
			p->subdevs[IDX_FIMC] = sd;
			break;
		case GRP_ID_FIMC_IS:
			p->subdevs[IDX_IS_ISP] = sd;
			break;
		default:
			break;
		}
		me = &sd->entity;
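		/* A single-pad entity (typically the sensor) ends the walk. */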
		if (me->num_pads == 1)
			break;
	}

	if (sensor && p->subdevs[IDX_FIMC])
		__setup_sensor_notification(fmd, sensor, p->subdevs[IDX_FIMC]);
}
Example #6
/* get the subdev which is connected to the output video node */
static struct v4l2_subdev *
vpfe_video_remote_subdev(struct vpfe_video_device *video, u32 *pad)
{
	struct media_pad *remote = media_entity_remote_pad(&video->pad);

	if (remote == NULL || remote->entity->type != MEDIA_ENT_T_V4L2_SUBDEV)
		return NULL;
	if (pad)
		*pad = remote->index;
	return media_entity_to_v4l2_subdev(remote->entity);
}
Example #7
/**
 * Validate a pipeline by checking both ends of all links for format
 * discrepancies.
 *
 * Return 0 if all formats match, or -EPIPE if at least one link is found with
 * different formats on its two ends.
 */
static int vpfe_video_validate_pipeline(struct vpfe_pipeline *pipe)
{
	struct v4l2_subdev_format fmt_source;
	struct v4l2_subdev_format fmt_sink;
	struct v4l2_subdev *subdev;
	struct media_pad *pad;
	int ret;

	/*
	 * It should not matter whether we start from outputs[0] or outputs[1]:
	 * the idea is to traverse the pipeline backwards, and the output video
	 * node always carries the format of the pad it is connected to.
	 */
	subdev = vpfe_video_remote_subdev(pipe->outputs[0], NULL);
	if (subdev == NULL)
		return -EPIPE;

	while (1) {
		/* Retrieve the sink format */
		pad = &subdev->entity.pads[0];
		if (!(pad->flags & MEDIA_PAD_FL_SINK))
			break;

		fmt_sink.which = V4L2_SUBDEV_FORMAT_ACTIVE;
		fmt_sink.pad = pad->index;
		ret = v4l2_subdev_call(subdev, pad, get_fmt, NULL,
				       &fmt_sink);

		if (ret < 0 && ret != -ENOIOCTLCMD)
			return -EPIPE;

		/* Retrieve the source format */
		pad = media_entity_remote_pad(pad);
		if (pad == NULL ||
			pad->entity->type != MEDIA_ENT_T_V4L2_SUBDEV)
			break;

		subdev = media_entity_to_v4l2_subdev(pad->entity);

		fmt_source.which = V4L2_SUBDEV_FORMAT_ACTIVE;
		fmt_source.pad = pad->index;
		ret = v4l2_subdev_call(subdev, pad, get_fmt, NULL, &fmt_source);
		if (ret < 0 && ret != -ENOIOCTLCMD)
			return -EPIPE;

		/* Check if the two ends match */
		if (fmt_source.format.code != fmt_sink.format.code ||
		    fmt_source.format.width != fmt_sink.format.width ||
		    fmt_source.format.height != fmt_sink.format.height)
			return -EPIPE;
	}
	return 0;
}
Example #8
/* get the subdev which is connected to the output video node */
static struct v4l2_subdev *
vpfe_video_remote_subdev(struct vpfe_video_device *video, u32 *pad)
{
	struct media_pad *remote = media_entity_remote_pad(&video->pad);

	if (!remote || !is_media_entity_v4l2_subdev(remote->entity))
		return NULL;
	if (pad)
		*pad = remote->index;
	return media_entity_to_v4l2_subdev(remote->entity);
}
Example #9
/*
 * iss_pipeline_enable - Enable streaming on a pipeline
 * @pipe: ISS pipeline
 * @mode: Stream mode (single shot or continuous)
 *
 * Walk the entities chain starting at the pipeline output video node and start
 * all modules in the chain in the given mode.
 *
 * Return 0 if successful, or the return value of the failed video::s_stream
 * operation otherwise.
 */
static int iss_pipeline_enable(struct iss_pipeline *pipe,
			       enum iss_pipeline_stream_state mode)
{
	struct iss_device *iss = pipe->output->iss;
	struct media_entity *entity;
	struct media_pad *pad;
	struct v4l2_subdev *subdev;
	unsigned long flags;
	int ret;

	/* If one of the entities in the pipeline has crashed it will not work
	 * properly. Refuse to start streaming in that case. This check must be
	 * performed before the loop below to avoid starting entities if the
	 * pipeline won't start anyway (those entities would then likely fail to
	 * stop, making the problem worse).
	 */
	if (media_entity_enum_intersects(&pipe->ent_enum, &iss->crashed))
		return -EIO;

	spin_lock_irqsave(&pipe->lock, flags);
	pipe->state &= ~(ISS_PIPELINE_IDLE_INPUT | ISS_PIPELINE_IDLE_OUTPUT);
	spin_unlock_irqrestore(&pipe->lock, flags);

	pipe->do_propagation = false;

	entity = &pipe->output->video.entity;
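	/* Walk upstream from the output video node and start each subdev. */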
	while (1) {
		pad = &entity->pads[0];
		if (!(pad->flags & MEDIA_PAD_FL_SINK))
			break;

		pad = media_entity_remote_pad(pad);
		if (!pad || !is_media_entity_v4l2_subdev(pad->entity))
			break;

		entity = pad->entity;
		subdev = media_entity_to_v4l2_subdev(entity);

		ret = v4l2_subdev_call(subdev, video, s_stream, mode);
		if (ret < 0 && ret != -ENOIOCTLCMD) {
			iss_pipeline_disable(pipe, entity);
			return ret;
		}

		if (subdev == &iss->csi2a.subdev ||
		    subdev == &iss->csi2b.subdev)
			pipe->do_propagation = true;
	}

	iss_print_status(pipe->output->iss);
	return 0;
}
Example #10
/*
 * Validate a pipeline by checking both ends of all links for format
 * discrepancies.
 *
 * Return 0 if all formats match, or -EPIPE if at least one link is found with
 * different formats on its two ends.
 */
static int
isp_video_validate_pipeline(struct isp_pipeline *pipe)
{
	struct v4l2_mbus_framefmt fmt_source;
	struct v4l2_mbus_framefmt fmt_sink;
	struct media_entity_pad *pad;
	struct v4l2_subdev *subdev;
	int ret;

	subdev = isp_video_remote_subdev(pipe->output, NULL);
	if (subdev == NULL)
		return -EPIPE;

	while (1) {
		/* Retrieve the sink format */
		pad = &subdev->entity.pads[0];
		if (pad->type != MEDIA_PAD_TYPE_INPUT)
			break;

		ret = v4l2_subdev_call(subdev, pad, get_fmt, NULL, pad->index,
				       &fmt_sink, V4L2_SUBDEV_FORMAT_ACTIVE);
		if (ret < 0 && ret != -ENOIOCTLCMD)
			return -EPIPE;

		/* Retrieve the source format */
		pad = media_entity_remote_pad(pad);
		if (pad == NULL ||
		    pad->entity->type != MEDIA_ENTITY_TYPE_SUBDEV)
			break;

		subdev = media_entity_to_v4l2_subdev(pad->entity);

		ret = v4l2_subdev_call(subdev, pad, get_fmt, NULL, pad->index,
				       &fmt_source, V4L2_SUBDEV_FORMAT_ACTIVE);
		if (ret < 0 && ret != -ENOIOCTLCMD)
			return -EPIPE;

		/* Check if the two ends match */
		if (fmt_source.code != fmt_sink.code ||
		    fmt_source.width != fmt_sink.width ||
		    fmt_source.height != fmt_sink.height) {
			printk(KERN_ERR "%s format mismatch\n", __func__);
			printk(KERN_ERR "  code   source=%d sink=%d\n", fmt_source.code, fmt_sink.code);
			printk(KERN_ERR "  width  source=%d sink=%d\n", fmt_source.width, fmt_sink.width);
			printk(KERN_ERR "  height source=%d sink=%d\n", fmt_source.height, fmt_sink.height);

			return -EPIPE;
		}
	}

	return 0;
}
Example #11
/* get the media entity of the active external subdev (sensor/decoder) */
static struct media_entity *
vpfe_get_input_entity(struct vpfe_video_device *video)
{
	struct vpfe_device *vpfe_dev = video->vpfe_dev;
	struct media_pad *remote;

	remote = media_entity_remote_pad(&vpfe_dev->vpfe_isif.pads[0]);
	if (remote == NULL) {
		pr_err("Invalid media connection to isif/ccdc\n");
		return NULL;
	}
	return remote->entity;
}
Example #12
/*
 * iss_pipeline_is_last - Verify if entity has an enabled link to the output
 *			  video node
 * @me: ISS module's media entity
 *
 * Returns 1 if the entity has an enabled link to the output video node, or 0
 * otherwise. This holds only as long as a pipeline can have no more than one
 * output node.
 */
static int iss_pipeline_is_last(struct media_entity *me)
{
	struct iss_pipeline *pipe;
	struct media_pad *pad;

	if (!me->pipe)
		return 0;
	pipe = to_iss_pipeline(me);
	if (pipe->stream_state == ISS_PIPELINE_STREAM_STOPPED)
		return 0;
	pad = media_entity_remote_pad(&pipe->output->pad);
	return pad->entity == me;
}
Example #13
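/*
 * Return the subdev connected to the video node, or NULL if the remote
 * entity is not a subdev.
 */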
static struct v4l2_subdev *
isp_video_remote_subdev(struct isp_video *video, u32 *pad)
{
	struct media_entity_pad *remote;

	remote = media_entity_remote_pad(&video->pad);

	if (remote == NULL || remote->entity->type != MEDIA_ENTITY_TYPE_SUBDEV)
		return NULL;

	if (pad)
		*pad = remote->index;

	return media_entity_to_v4l2_subdev(remote->entity);
}
Example #14
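/*
 * Return the subdev connected to the given DMA pad, or NULL if the remote
 * entity is not a subdev.
 */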
static struct v4l2_subdev *
xvip_dma_remote_subdev(struct media_pad *local, u32 *pad)
{
	struct media_pad *remote;

	remote = media_entity_remote_pad(local);
	if (remote == NULL ||
	    media_entity_type(remote->entity) != MEDIA_ENT_T_V4L2_SUBDEV)
		return NULL;

	if (pad)
		*pad = remote->index;

	return media_entity_to_v4l2_subdev(remote->entity);
}
Example #15
static int fimc_pipeline_validate(struct fimc_lite *fimc)
{
	struct v4l2_subdev *sd = &fimc->subdev;
	struct v4l2_subdev_format sink_fmt, src_fmt;
	struct media_pad *pad;
	int ret;

	while (1) {
		/* Retrieve format at the sink pad */
		pad = &sd->entity.pads[0];
		if (!(pad->flags & MEDIA_PAD_FL_SINK))
			break;
		/* Don't call FIMC subdev operation to avoid nested locking */
		if (sd == &fimc->subdev) {
			struct flite_frame *ff = &fimc->out_frame;
			sink_fmt.format.width = ff->f_width;
			sink_fmt.format.height = ff->f_height;
			sink_fmt.format.code = fimc->inp_frame.fmt->mbus_code;
		} else {
			sink_fmt.pad = pad->index;
			sink_fmt.which = V4L2_SUBDEV_FORMAT_ACTIVE;
			ret = v4l2_subdev_call(sd, pad, get_fmt, NULL,
					       &sink_fmt);
			if (ret < 0 && ret != -ENOIOCTLCMD)
				return -EPIPE;
		}
		/* Retrieve format at the source pad */
		pad = media_entity_remote_pad(pad);
		if (pad == NULL ||
		    media_entity_type(pad->entity) != MEDIA_ENT_T_V4L2_SUBDEV)
			break;

		sd = media_entity_to_v4l2_subdev(pad->entity);
		src_fmt.pad = pad->index;
		src_fmt.which = V4L2_SUBDEV_FORMAT_ACTIVE;
		ret = v4l2_subdev_call(sd, pad, get_fmt, NULL, &src_fmt);
		if (ret < 0 && ret != -ENOIOCTLCMD)
			return -EPIPE;

		if (src_fmt.format.width != sink_fmt.format.width ||
		    src_fmt.format.height != sink_fmt.format.height ||
		    src_fmt.format.code != sink_fmt.format.code)
			return -EPIPE;
	}
	return 0;
}
Example #16
static int video_start_streaming(struct vb2_queue *q, unsigned int count)
{
	struct camss_video *video = vb2_get_drv_priv(q);
	struct video_device *vdev = &video->vdev;
	struct media_entity *entity;
	struct media_pad *pad;
	struct v4l2_subdev *subdev;
	int ret;

	ret = media_pipeline_start(&vdev->entity, &video->pipe);
	if (ret < 0)
		return ret;

	ret = video_check_format(video);
	if (ret < 0)
		goto error;

	entity = &vdev->entity;
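	/* Walk upstream from the video node and start each subdev. */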
	while (1) {
		pad = &entity->pads[0];
		if (!(pad->flags & MEDIA_PAD_FL_SINK))
			break;

		pad = media_entity_remote_pad(pad);
		if (!pad || !is_media_entity_v4l2_subdev(pad->entity))
			break;

		entity = pad->entity;
		subdev = media_entity_to_v4l2_subdev(entity);

		ret = v4l2_subdev_call(subdev, video, s_stream, 1);
		if (ret < 0 && ret != -ENOIOCTLCMD)
			goto error;
	}

	return 0;

error:
	media_pipeline_stop(&vdev->entity);

	video->ops->flush_buffers(video, VB2_BUF_STATE_QUEUED);

	return ret;
}
Example #17
/*
 * iss_pipeline_disable - Disable streaming on a pipeline
 * @pipe: ISS pipeline
 * @until: entity at which to stop pipeline walk
 *
 * Walk the entities chain starting at the pipeline output video node and stop
 * all modules in the chain. Wait synchronously for the modules to be stopped if
 * necessary.
 *
 * If the until argument isn't NULL, stop the pipeline walk when reaching the
 * until entity. This is used to disable a partially started pipeline due to a
 * subdev start error.
 */
static int iss_pipeline_disable(struct iss_pipeline *pipe,
				struct media_entity *until)
{
	struct iss_device *iss = pipe->output->iss;
	struct media_entity *entity;
	struct media_pad *pad;
	struct v4l2_subdev *subdev;
	int failure = 0;
	int ret;

	entity = &pipe->output->video.entity;
	while (1) {
		pad = &entity->pads[0];
		if (!(pad->flags & MEDIA_PAD_FL_SINK))
			break;

		pad = media_entity_remote_pad(pad);
		if (!pad || !is_media_entity_v4l2_subdev(pad->entity))
			break;

		entity = pad->entity;
		if (entity == until)
			break;

		subdev = media_entity_to_v4l2_subdev(entity);
		ret = v4l2_subdev_call(subdev, video, s_stream, 0);
		if (ret < 0) {
			dev_warn(iss->dev, "%s: module stop timeout.\n",
				 subdev->name);
			/* If the entity failed to stop, assume it has
			 * crashed. Mark it as such; the ISS will be reset when
			 * applications release it.
			 */
			media_entity_enum_set(&iss->crashed, &subdev->entity);
			failure = -ETIMEDOUT;
		}
	}

	return failure;
}
Example #18
/* Called with the media graph mutex held or entity->stream_count > 0. */
struct v4l2_subdev *fimc_find_remote_sensor(struct media_entity *entity)
{
	struct media_pad *pad = &entity->pads[0];
	struct v4l2_subdev *sd;

	while (pad->flags & MEDIA_PAD_FL_SINK) {
		/* source pad */
		pad = media_entity_remote_pad(pad);
		if (pad == NULL ||
		    media_entity_type(pad->entity) != MEDIA_ENT_T_V4L2_SUBDEV)
			break;

		sd = media_entity_to_v4l2_subdev(pad->entity);

		if (sd->grp_id == GRP_ID_FIMC_IS_SENSOR ||
		    sd->grp_id == GRP_ID_SENSOR)
			return sd;
		/* sink pad */
		pad = &sd->entity.pads[0];
	}
	return NULL;
}
Example #19
/*
 * iss_pipeline_disable - Disable streaming on a pipeline
 * @pipe: ISS pipeline
 *
 * Walk the entities chain starting at the pipeline output video node and stop
 * all modules in the chain. Wait synchronously for the modules to be stopped if
 * necessary.
 */
static int iss_pipeline_disable(struct iss_pipeline *pipe)
{
	struct iss_device *iss = pipe->output->iss;
	struct media_entity *entity;
	struct media_pad *pad;
	struct v4l2_subdev *subdev;
	int failure = 0;
	int ret;

	entity = &pipe->output->video.entity;
	while (1) {
		pad = &entity->pads[0];
		if (!(pad->flags & MEDIA_PAD_FL_SINK))
			break;

		pad = media_entity_remote_pad(pad);
		if (pad == NULL ||
		    media_entity_type(pad->entity) != MEDIA_ENT_T_V4L2_SUBDEV)
			break;

		entity = pad->entity;
		subdev = media_entity_to_v4l2_subdev(entity);

		ret = v4l2_subdev_call(subdev, video, s_stream, 0);
		if (ret < 0) {
			dev_dbg(iss->dev, "%s: module stop timeout.\n",
				subdev->name);
			/* If the entity failed to stop, assume it has
			 * crashed. Mark it as such; the ISS will be reset when
			 * applications release it.
			 */
			iss->crashed |= 1U << subdev->entity.id;
			failure = -ETIMEDOUT;
		}
	}

	return failure;
}
Example #20
/*
 * Check for source/sink format differences at each link.
 * Return 0 if the formats match or -EPIPE otherwise.
 */
static int isp_video_pipeline_validate(struct fimc_isp *isp)
{
	struct v4l2_subdev *sd = &isp->subdev;
	struct v4l2_subdev_format sink_fmt, src_fmt;
	struct media_pad *pad;
	int ret;

	while (1) {
		/* Retrieve format at the sink pad */
		pad = &sd->entity.pads[0];
		if (!(pad->flags & MEDIA_PAD_FL_SINK))
			break;
		sink_fmt.pad = pad->index;
		sink_fmt.which = V4L2_SUBDEV_FORMAT_ACTIVE;
		ret = v4l2_subdev_call(sd, pad, get_fmt, NULL, &sink_fmt);
		if (ret < 0 && ret != -ENOIOCTLCMD)
			return -EPIPE;

		/* Retrieve format at the source pad */
		pad = media_entity_remote_pad(pad);
		if (!pad || !is_media_entity_v4l2_subdev(pad->entity))
			break;

		sd = media_entity_to_v4l2_subdev(pad->entity);
		src_fmt.pad = pad->index;
		src_fmt.which = V4L2_SUBDEV_FORMAT_ACTIVE;
		ret = v4l2_subdev_call(sd, pad, get_fmt, NULL, &src_fmt);
		if (ret < 0 && ret != -ENOIOCTLCMD)
			return -EPIPE;

		if (src_fmt.format.width != sink_fmt.format.width ||
		    src_fmt.format.height != sink_fmt.format.height ||
		    src_fmt.format.code != sink_fmt.format.code)
			return -EPIPE;
	}

	return 0;
}
Example #21
/* updates external subdev(sensor/decoder) which is active */
static int vpfe_update_current_ext_subdev(struct vpfe_video_device *video)
{
	struct vpfe_device *vpfe_dev = video->vpfe_dev;
	struct vpfe_config *vpfe_cfg;
	struct v4l2_subdev *subdev;
	struct media_pad *remote;
	int i;

	remote = media_entity_remote_pad(&vpfe_dev->vpfe_isif.pads[0]);
	if (remote == NULL) {
		pr_err("Invalid media connection to isif/ccdc\n");
		return -EINVAL;
	}

	subdev = media_entity_to_v4l2_subdev(remote->entity);
	vpfe_cfg = vpfe_dev->pdev->platform_data;
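	/* Match the connected entity against platform data by module name. */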
	for (i = 0; i < vpfe_cfg->num_subdevs; i++) {
		if (!strcmp(vpfe_cfg->sub_devs[i].module_name, subdev->name)) {
			video->current_ext_subdev = &vpfe_cfg->sub_devs[i];
			break;
		}
	}

	/* the user has not linked any decoder/sensor to the isif/ccdc */
	if (i == vpfe_cfg->num_subdevs) {
		pr_err("Invalid media chain connection to isif/ccdc\n");
		return -EINVAL;
	}
	/* find the v4l2 subdev pointer */
	for (i = 0; i < vpfe_dev->num_ext_subdevs; i++) {
		if (!strcmp(video->current_ext_subdev->module_name,
			vpfe_dev->sd[i]->name))
			video->current_ext_subdev->subdev = vpfe_dev->sd[i];
	}
	return 0;
}
Example #22
static int csi2_configure(struct isp_csi2_device *csi2)
{
    const struct isp_v4l2_subdevs_group *pdata;
    struct isp_device *isp = csi2->isp;
    struct isp_csi2_timing_cfg *timing = &csi2->timing[0];
    struct v4l2_subdev *sensor;
    struct media_pad *pad;

    /*
     * CSI2 fields that can be updated while the context or the interface
     * is enabled are currently not updated dynamically, so do not allow
     * reconfiguration if either has been enabled.
     */
    if (csi2->contexts[0].enabled || csi2->ctrl.if_enable)
        return -EBUSY;

    pad = media_entity_remote_pad(&csi2->pads[CSI2_PAD_SINK]);
    sensor = media_entity_to_v4l2_subdev(pad->entity);
    pdata = sensor->host_priv;

    csi2->frame_skip = 0;
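    /* Query the sensor for the number of frames to skip at stream start. */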
    v4l2_subdev_call(sensor, sensor, g_skip_frames, &csi2->frame_skip);

    csi2->ctrl.vp_out_ctrl = pdata->bus.csi2.vpclk_div;
    csi2->ctrl.frame_mode = ISP_CSI2_FRAME_IMMEDIATE;
    csi2->ctrl.ecc_enable = pdata->bus.csi2.crc;

    timing->ionum = 1;
    timing->force_rx_mode = 1;
    timing->stop_state_16x = 1;
    timing->stop_state_4x = 1;
    timing->stop_state_counter = 0x1FF;

    /*
     * The CSI2 receiver can't do any format conversion except DPCM
     * decompression, so every set_format call configures both pads
     * and enables DPCM decompression as a special case:
     */
    if (csi2->formats[CSI2_PAD_SINK].code !=
            csi2->formats[CSI2_PAD_SOURCE].code)
        csi2->dpcm_decompress = true;
    else
        csi2->dpcm_decompress = false;

    csi2->contexts[0].format_id = csi2_ctx_map_format(csi2);

    if (csi2->video_out.bpl_padding == 0)
        csi2->contexts[0].data_offset = 0;
    else
        csi2->contexts[0].data_offset = csi2->video_out.bpl_value;

    /*
     * Enable end of frame and end of line signals generation for
     * context 0. These signals are generated from CSI2 receiver to
     * qualify the last pixel of a frame and the last pixel of a line.
     * Without enabling the signals CSI2 receiver writes data to memory
     * beyond buffer size and/or data line offset is not handled correctly.
     */
    csi2->contexts[0].eof_enabled = 1;
    csi2->contexts[0].eol_enabled = 1;

    csi2_irq_complexio1_set(isp, csi2, 1);
    csi2_irq_ctx_set(isp, csi2, 1);
    csi2_irq_status_set(isp, csi2, 1);

    /* Set configuration (timings, format and links) */
    csi2_timing_config(isp, csi2, timing);
    csi2_recv_config(isp, csi2, &csi2->ctrl);
    csi2_ctx_config(isp, csi2, &csi2->contexts[0]);

    return 0;
}
Example #23
static int isp_video_open(struct file *file)
{
	struct isp_video *video = video_drvdata(file);
	struct isp_video_fh *handle;
	struct media_entity_pad *pad;
	int ret = 0;

	handle = kzalloc(sizeof(*handle), GFP_KERNEL);
	if (handle == NULL)
		return -ENOMEM;

	v4l2_fh_init(&handle->vfh, &video->video);
	v4l2_fh_add(&handle->vfh);

	/* If this is the first user, initialise the pipeline. */
	if (atomic_inc_return(&video->users) == 1) {
		if (isp_get(video->isp) == NULL) {
			ret = -EBUSY;
			goto done;
		}
	}

	isp_video_queue_init(&handle->queue, video->type, &isp_video_queue_ops,
			     video->isp->dev, sizeof(struct isp_buffer));

	memset(&handle->format, 0, sizeof(handle->format));
	handle->format.type = video->type;
	handle->timeperframe.denominator = 1;

	/*
	 * If a subdev is linked to this dev, then initialize the
	 * format to match the subdev.
	 */
	pad = media_entity_remote_pad(&video->pad);
	if (pad && pad->entity->type == MEDIA_ENTITY_TYPE_SUBDEV) {
		struct v4l2_subdev *subdev;
		struct v4l2_mbus_framefmt fmt_source;
		struct v4l2_subdev_frame_interval fi;
		int err;

		subdev = media_entity_to_v4l2_subdev(pad->entity);
		err = v4l2_subdev_call(subdev, pad, get_fmt, NULL, pad->index,
				       &fmt_source, V4L2_SUBDEV_FORMAT_ACTIVE);
		if (err >= 0) {
			isp_video_mbus_to_pix(video, &fmt_source,
					      &handle->format.fmt.pix);
			handle->format.fmt.pix.width = fmt_source.width;
			handle->format.fmt.pix.height = fmt_source.height;
		}

		err = __find_timeperframe(pad, subdev, &fi, 0);
		if (err >= 0) {
			handle->timeperframe.numerator = fi.interval.numerator;
			handle->timeperframe.denominator = fi.interval.denominator;
		}
	}

	handle->video = video;
	file->private_data = &handle->vfh;

done:
	if (ret < 0) {
		v4l2_fh_del(&handle->vfh);
		atomic_dec(&video->users);
		kfree(handle);
	}

	return ret;
}