Code example #1
File: gsc-capture.c Project: hedongjie/m35x
static int gsc_capture_streamon(struct file *file, void *priv,
				enum v4l2_buf_type type)
{
	struct gsc_dev *gsc = video_drvdata(file);
	struct gsc_pipeline *p = &gsc->pipeline;
	int ret;

	if (gsc_cap_active(gsc))
		return -EBUSY;

	if (p->disp) {
		gsc_pm_qos_ctrl(gsc, GSC_QOS_ON, 267000, 200000);
		media_entity_pipeline_start(&p->disp->entity, p->pipe);
	} else if (p->sensor) {
		media_entity_pipeline_start(&p->sensor->entity, p->pipe);
	} else {
		gsc_err("Error pipeline");
		return -EPIPE;
	}

	ret = gsc_cap_link_validate(gsc);
	if (ret)
		return ret;

	gsc_hw_set_sw_reset(gsc);
	ret = gsc_wait_reset(gsc);
	if (ret < 0) {
		gsc_err("gscaler s/w reset timeout");
		return ret;
	}
	gsc_hw_set_output_buf_mask_all(gsc);
	return vb2_streamon(&gsc->cap.vbq, type);
}
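All of the examples on this page share the same skeleton: claim the media pipeline with media_entity_pipeline_start(), validate the pipeline's links, then hand the buffer queue to videobuf2. Note that examples #1, #2 and #6 neither check the return value of media_entity_pipeline_start() nor stop the pipeline when a later step fails, while the other examples stop it again on their error paths. A minimal sketch of the pattern with full unwinding follows; my_dev, my_validate and the field names are hypothetical stand-ins, and only media_entity_pipeline_start()/media_entity_pipeline_stop() and vb2_streamon() are real kernel APIs here:

static int my_streamon(struct file *file, void *priv, enum v4l2_buf_type type)
{
	struct my_dev *dev = video_drvdata(file);	/* hypothetical driver state */
	int ret;

	/* Lock the media graph: no link touching an entity in the pipeline
	 * may be activated or deactivated while streaming.
	 */
	ret = media_entity_pipeline_start(&dev->vdev.entity, &dev->pipe);
	if (ret < 0)
		return ret;

	/* Check that the formats negotiated across the pipeline links agree. */
	ret = my_validate(dev);		/* hypothetical, cf. *_pipeline_validate() */
	if (ret < 0)
		goto err_stop;

	/* Hand the queue to videobuf2; buffers start flowing from here. */
	ret = vb2_streamon(&dev->vbq, type);
	if (ret < 0)
		goto err_stop;

	return 0;

err_stop:
	/* Unwind on any failure so the pipeline is not left locked. */
	media_entity_pipeline_stop(&dev->vdev.entity);
	return ret;
}

Examples #3 and #4 call vb2_ioctl_streamon() instead of vb2_streamon(); that helper resolves the queue from the video_device and additionally rejects callers that do not own the queue.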
Code example #2
static int gsc_capture_streamon(struct file *file, void *priv,
				enum v4l2_buf_type type)
{
	struct gsc_dev *gsc = video_drvdata(file);
	struct gsc_pipeline *p = &gsc->pipeline;
	int ret;

	if (gsc_cap_active(gsc))
		return -EBUSY;

	if (p->disp) {
		media_entity_pipeline_start(&p->disp->entity, p->pipe);
	} else if (p->sensor) {
		media_entity_pipeline_start(&p->sensor->entity, p->pipe);
	} else {
		gsc_err("Error pipeline");
		return -EPIPE;
	}

	ret = gsc_cap_link_validate(gsc);
	if (ret)
		return ret;

	return vb2_streamon(&gsc->cap.vbq, type);
}
Code example #3
File: fimc-isp-video.c Project: a2hojsjsjs/linux
static int isp_video_streamon(struct file *file, void *priv,
				      enum v4l2_buf_type type)
{
	struct fimc_isp *isp = video_drvdata(file);
	struct exynos_video_entity *ve = &isp->video_capture.ve;
	struct media_entity *me = &ve->vdev.entity;
	int ret;

	ret = media_entity_pipeline_start(me, &ve->pipe->mp);
	if (ret < 0)
		return ret;

	ret = isp_video_pipeline_validate(isp);
	if (ret < 0)
		goto p_stop;

	ret = vb2_ioctl_streamon(file, priv, type);
	if (ret < 0)
		goto p_stop;

	isp->video_capture.streaming = 1;
	return 0;
p_stop:
	media_entity_pipeline_stop(me);
	return ret;
}
Code example #4
File: fimc-lite.c Project: 19Dan01/linux
static int fimc_lite_streamon(struct file *file, void *priv,
			      enum v4l2_buf_type type)
{
	struct fimc_lite *fimc = video_drvdata(file);
	struct media_entity *entity = &fimc->ve.vdev.entity;
	int ret;

	if (fimc_lite_active(fimc))
		return -EBUSY;

	ret = media_entity_pipeline_start(entity, &fimc->ve.pipe->mp);
	if (ret < 0)
		return ret;

	ret = fimc_pipeline_validate(fimc);
	if (ret < 0)
		goto err_p_stop;

	fimc->sensor = fimc_find_remote_sensor(&fimc->subdev.entity);

	ret = vb2_ioctl_streamon(file, priv, type);
	if (!ret) {
		fimc->streaming = true;
		return ret;
	}

err_p_stop:
	media_entity_pipeline_stop(entity);
	return ret;
}
Code example #5
File: xilinx-dma.c Project: bmouring/linux-xlnx
static int xvip_dma_start_streaming(struct vb2_queue *vq, unsigned int count)
{
	struct xvip_dma *dma = vb2_get_drv_priv(vq);
	struct xvip_pipeline *pipe;
	int ret;

	dma->sequence = 0;

	/*
	 * Start streaming on the pipeline. No link touching an entity in the
	 * pipeline can be activated or deactivated once streaming is started.
	 *
	 * Use the pipeline object embedded in the first DMA object that starts
	 * streaming.
	 */
	pipe = dma->video.entity.pipe
	     ? to_xvip_pipeline(&dma->video.entity) : &dma->pipe;

	ret = media_entity_pipeline_start(&dma->video.entity, &pipe->pipe);
	if (ret < 0)
		return ret;

	/* Verify that the configured format matches the output of the
	 * connected subdev.
	 */
	ret = xvip_dma_verify_format(dma);
	if (ret < 0)
		goto error;

	ret = xvip_pipeline_prepare(pipe, dma);
	if (ret < 0)
		goto error;

	/* Start the DMA engine. This must be done before starting the blocks
	 * in the pipeline to avoid DMA synchronization issues.
	 */
	dma_async_issue_pending(dma->dma);

	/* Start the pipeline. */
	xvip_pipeline_set_stream(pipe, true);

	return 0;

error:
	media_entity_pipeline_stop(&dma->video.entity);
	return ret;
}
Code example #6
File: fimc-capture.c Project: artynet/linux-3.3.8
static int fimc_cap_streamon(struct file *file, void *priv,
			     enum v4l2_buf_type type)
{
	struct fimc_dev *fimc = video_drvdata(file);
	struct fimc_pipeline *p = &fimc->pipeline;
	int ret;

	if (fimc_capture_active(fimc))
		return -EBUSY;

	media_entity_pipeline_start(&p->sensor->entity, p->pipe);

	if (fimc->vid_cap.user_subdev_api) {
		ret = fimc_pipeline_validate(fimc);
		if (ret)
			return ret;
	}
	return vb2_streamon(&fimc->vid_cap.vbq, type);
}
Code example #7
static int fimc_lite_streamon(struct file *file, void *priv,
			      enum v4l2_buf_type type)
{
	struct fimc_lite *fimc = video_drvdata(file);
	struct v4l2_subdev *sensor = fimc->pipeline.subdevs[IDX_SENSOR];
	struct fimc_pipeline *p = &fimc->pipeline;
	int ret;

	if (fimc_lite_active(fimc))
		return -EBUSY;

	ret = media_entity_pipeline_start(&sensor->entity, p->m_pipeline);
	if (ret < 0)
		return ret;

	ret = fimc_pipeline_validate(fimc);
	if (ret) {
		media_entity_pipeline_stop(&sensor->entity);
		return ret;
	}

	return vb2_streamon(&fimc->vb_queue, type);
}
Code example #8
static int fimc_cap_streamon(struct file *file, void *priv,
			     enum v4l2_buf_type type)
{
	struct fimc_dev *fimc = video_drvdata(file);
	struct fimc_pipeline *p = &fimc->pipeline;
	struct v4l2_subdev *sd = p->subdevs[IDX_SENSOR];
	int ret;

	if (fimc_capture_active(fimc))
		return -EBUSY;

	ret = media_entity_pipeline_start(&sd->entity, p->m_pipeline);
	if (ret < 0)
		return ret;

	if (fimc->vid_cap.user_subdev_api) {
		ret = fimc_pipeline_validate(fimc);
		if (ret < 0) {
			media_entity_pipeline_stop(&sd->entity);
			return ret;
		}
	}
	return vb2_streamon(&fimc->vid_cap.vbq, type);
}
Code example #9
File: iss_video.c Project: Astralix/mainline-dss11
/*
 * Stream management
 *
 * Every ISS pipeline has a single input and a single output. The input can be
 * either a sensor or a video node. The output is always a video node.
 *
 * As every pipeline has an output video node, the ISS video object at the
 * pipeline output stores the pipeline state. It tracks the streaming state of
 * both the input and output, as well as the availability of buffers.
 *
 * In sensor-to-memory mode, frames are always available at the pipeline input.
 * Starting the sensor usually requires I2C transfers and must be done in
 * interruptible context. The pipeline is started and stopped synchronously
 * with the stream on/off commands. All modules in the pipeline have their
 * subdev set stream handler called. The module at the end of the pipeline must
 * delay starting the hardware until buffers are available at its output.
 *
 * In memory-to-memory mode, starting/stopping the stream requires
 * synchronization between the input and output. ISS modules can't be stopped
 * in the middle of a frame, and at least some of the modules seem to become
 * busy as soon as they're started, even if they don't receive a frame start
 * event. For that reason frames need to be processed in single-shot mode. The
 * driver needs to wait until a frame is completely processed and written to
 * memory before restarting the pipeline for the next frame. Pipelined
 * processing might be possible but requires more testing.
 *
 * Stream start must be delayed until buffers are available at both the input
 * and output. The pipeline must be started in the videobuf queue callback with
 * the buffer queue spinlock held. The modules' subdev set stream operation must
 * not sleep.
 */
static int
iss_video_streamon(struct file *file, void *fh, enum v4l2_buf_type type)
{
	struct iss_video_fh *vfh = to_iss_video_fh(fh);
	struct iss_video *video = video_drvdata(file);
	struct media_entity_graph graph;
	struct media_entity *entity;
	enum iss_pipeline_state state;
	struct iss_pipeline *pipe;
	struct iss_video *far_end;
	unsigned long flags;
	int ret;

	if (type != video->type)
		return -EINVAL;

	mutex_lock(&video->stream_lock);

	/* Start streaming on the pipeline. No link touching an entity in the
	 * pipeline can be activated or deactivated once streaming is started.
	 */
	pipe = video->video.entity.pipe
	     ? to_iss_pipeline(&video->video.entity) : &video->pipe;
	pipe->external = NULL;
	pipe->external_rate = 0;
	pipe->external_bpp = 0;
	pipe->entities = 0;

	if (video->iss->pdata->set_constraints)
		video->iss->pdata->set_constraints(video->iss, true);

	ret = media_entity_pipeline_start(&video->video.entity, &pipe->pipe);
	if (ret < 0)
		goto err_media_entity_pipeline_start;

	entity = &video->video.entity;
	media_entity_graph_walk_start(&graph, entity);
	while ((entity = media_entity_graph_walk_next(&graph)))
		pipe->entities |= 1 << entity->id;

	/* Verify that the currently configured format matches the output of
	 * the connected subdev.
	 */
	ret = iss_video_check_format(video, vfh);
	if (ret < 0)
		goto err_iss_video_check_format;

	video->bpl_padding = ret;
	video->bpl_value = vfh->format.fmt.pix.bytesperline;

	/* Find the ISS video node connected at the far end of the pipeline and
	 * update the pipeline.
	 */
	far_end = iss_video_far_end(video);

	if (video->type == V4L2_BUF_TYPE_VIDEO_CAPTURE) {
		state = ISS_PIPELINE_STREAM_OUTPUT | ISS_PIPELINE_IDLE_OUTPUT;
		pipe->input = far_end;
		pipe->output = video;
	} else {
		if (far_end == NULL) {
			ret = -EPIPE;
			goto err_iss_video_check_format;
		}

		state = ISS_PIPELINE_STREAM_INPUT | ISS_PIPELINE_IDLE_INPUT;
		pipe->input = video;
		pipe->output = far_end;
	}

	spin_lock_irqsave(&pipe->lock, flags);
	pipe->state &= ~ISS_PIPELINE_STREAM;
	pipe->state |= state;
	spin_unlock_irqrestore(&pipe->lock, flags);

	/* Set the maximum time per frame as the value requested by userspace.
	 * This is a soft limit that can be overridden if the hardware doesn't
	 * support the requested limit.
	 */
	if (video->type == V4L2_BUF_TYPE_VIDEO_OUTPUT)
		pipe->max_timeperframe = vfh->timeperframe;

	video->queue = &vfh->queue;
	INIT_LIST_HEAD(&video->dmaqueue);
	spin_lock_init(&video->qlock);
	video->error = false;
	atomic_set(&pipe->frame_number, -1);

	ret = vb2_streamon(&vfh->queue, type);
	if (ret < 0)
		goto err_iss_video_check_format;

	/* In sensor-to-memory mode, the stream can be started synchronously
	 * with the stream on command. In memory-to-memory mode, it will be
	 * started when buffers are queued on both the input and output.
	 */
	if (pipe->input == NULL) {
		unsigned long flags;
		ret = omap4iss_pipeline_set_stream(pipe,
					      ISS_PIPELINE_STREAM_CONTINUOUS);
		if (ret < 0)
			goto err_omap4iss_set_stream;
		spin_lock_irqsave(&video->qlock, flags);
		if (list_empty(&video->dmaqueue))
			video->dmaqueue_flags |= ISS_VIDEO_DMAQUEUE_UNDERRUN;
		spin_unlock_irqrestore(&video->qlock, flags);
	}

	mutex_unlock(&video->stream_lock);
	return 0;

err_omap4iss_set_stream:
	vb2_streamoff(&vfh->queue, type);
err_iss_video_check_format:
	media_entity_pipeline_stop(&video->video.entity);
err_media_entity_pipeline_start:
	if (video->iss->pdata->set_constraints)
		video->iss->pdata->set_constraints(video->iss, false);
	video->queue = NULL;

	mutex_unlock(&video->stream_lock);
	return ret;
}
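The comment at the head of example #9 imposes a rule for memory-to-memory mode: the pipeline must be restarted from the videobuf queue callback, under the buffer queue spinlock, using set stream operations that do not sleep. The sketch below only illustrates that rule; it is not code from iss_video.c, my_buffer and my_both_ends_ready() are hypothetical, and ISS_PIPELINE_STREAM_SINGLESHOT is the single-shot state the comment refers to:

struct my_buffer {			/* hypothetical wrapper, cf. struct iss_buffer */
	struct vb2_buffer vb;
	struct list_head list;
};

static void my_buf_queue(struct vb2_buffer *vb)
{
	struct iss_video *video = vb2_get_drv_priv(vb->vb2_queue);
	struct iss_pipeline *pipe = to_iss_pipeline(&video->video.entity);
	struct my_buffer *buf = container_of(vb, struct my_buffer, vb);
	unsigned long flags;

	spin_lock_irqsave(&video->qlock, flags);
	list_add_tail(&buf->list, &video->dmaqueue);

	/* Single-shot processing: restart only once the previous frame has
	 * been fully written to memory and both the input and the output
	 * video node have buffers queued. The set stream call must not
	 * sleep because the queue spinlock is held.
	 */
	if (my_both_ends_ready(pipe))	/* hypothetical predicate */
		omap4iss_pipeline_set_stream(pipe, ISS_PIPELINE_STREAM_SINGLESHOT);

	spin_unlock_irqrestore(&video->qlock, flags);
}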