Example #1
0
static int mxr_streamer_put(struct mxr_device *mdev, struct v4l2_subdev *sd)
{
	int i;
	int ret = 0;
	int local = 1;
	struct media_pad *pad;
	struct sub_mxr_device *sub_mxr;
	struct mxr_layer *layer;
	struct v4l2_subdev *gsc_sd;
	struct exynos_entity_data *md_data;
	struct s5p_mxr_platdata *pdata = mdev->pdata;

	mutex_lock(&mdev->s_mutex);
	--mdev->n_streamer;
	mxr_dbg(mdev, "%s(%d)\n", __func__, mdev->n_streamer);

	/* count the number of local paths in use */
	if (mdev->mxr_data_from == FROM_GSC_SD) {
		local = 0;
		for (i = 0; i < MXR_MAX_SUB_MIXERS; ++i) {
			sub_mxr = &mdev->sub_mxr[i];
			if (sub_mxr->local)
				local += sub_mxr->local;
		}
		if (local == 2)
			mxr_layer_sync(mdev, MXR_DISABLE);

		/* stop gscaler --> waiting for frame done */
		pad = &sd->entity.pads[MXR_PAD_SINK_GSCALER];
		pad = media_entity_remote_source(pad);
		if (pad) {
			gsc_sd = media_entity_to_v4l2_subdev(
					pad->entity);
			mxr_dbg(mdev, "stop from %s\n", gsc_sd->name);
			md_data = (struct exynos_entity_data *)
				gsc_sd->dev_priv;
			if (is_ip_ver_5g_1 || is_ip_ver_5a_0)
				md_data->media_ops->power_off(gsc_sd);
		}

		/* disable video layer */
		for (i = 0; i < MXR_MAX_SUB_MIXERS; ++i) {
			sub_mxr = &mdev->sub_mxr[i];
			if (sub_mxr->local) {
				layer = sub_mxr->layer[MXR_LAYER_VIDEO];
				layer->ops.stream_set(layer, 0);
				layer->pipe.state = MXR_PIPELINE_IDLE;
			}
		}
	}

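	/* stop the mixer only when its last streamer is being released:
	 * none left for a single path, one left when both sub-mixers are local */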
	if ((mdev->n_streamer == 0 && local == 1) ||
	    (mdev->n_streamer == 1 && local == 2)) {
		mxr_reg_streamoff(mdev);
		/* the Mixer setup takes effect at vsync */
		ret = mxr_reg_wait4update(mdev);
		if (ret) {
			mxr_err(mdev, "failed to get vsync (%d) from output\n",
					ret);
			goto out;
		}
	}
	/* disable mixer clock */
	if (mdev->mxr_data_from == FROM_GSC_SD)
		mxr_power_put(mdev);

	WARN(mdev->n_streamer < 0, "negative number of streamers (%d)\n",
		mdev->n_streamer);

out:
#if defined(CONFIG_ARM_EXYNOS5410_BUS_DEVFREQ)
	if ((mdev->n_streamer == 0 && local == 1) ||
	    (mdev->n_streamer == 1 && local == 2)) {
		pm_qos_remove_request(&exynos5_tv_mif_qos);
		pm_qos_remove_request(&exynos5_tv_int_qos);
	}
#endif
	mutex_unlock(&mdev->s_mutex);
	mxr_reg_dump(mdev);

	return ret;
}
Example #2
0
static int isp_csi2_configure(struct isp_csi2_device *csi2)
{
	const struct isp_v4l2_subdevs_group *pdata;
	struct isp_device *isp = csi2->isp;
	struct isp_csi2_timing_cfg *timing = &csi2->timing[0];
	struct v4l2_subdev *sensor;
	struct media_pad *pad;

	/*
	 * CSI2 fields that could change while the context or the
	 * interface is enabled are not updated dynamically at the
	 * moment, so reconfiguration is not allowed once either of
	 * them has been enabled.
	 */
	if (csi2->contexts[0].enabled || csi2->ctrl.if_enable)
		return -EBUSY;

	pad = media_entity_remote_source(&csi2->pads[CSI2_PAD_SINK]);
	sensor = media_entity_to_v4l2_subdev(pad->entity);
	pdata = sensor->host_priv;

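	/* ask the sensor how many initial frames to skip; default to none
	 * if the operation is not implemented */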
	csi2->frame_skip = 0;
	v4l2_subdev_call(sensor, sensor, g_skip_frames, &csi2->frame_skip);

	csi2->ctrl.vp_out_ctrl = pdata->bus.csi2.vpclk_div;
	csi2->ctrl.frame_mode = ISP_CSI2_FRAME_IMMEDIATE;
	csi2->ctrl.ecc_enable = pdata->bus.csi2.crc;

	timing->ionum = 1;
	timing->force_rx_mode = 1;
	timing->stop_state_16x = 1;
	timing->stop_state_4x = 1;
	timing->stop_state_counter = 0x1FF;

	/*
	 * The CSI2 receiver can't do any format conversion except DPCM
	 * decompression, so every set_format call configures both pads
	 * and enables DPCM decompression as a special case:
	 */
	if (csi2->formats[CSI2_PAD_SINK].code !=
	    csi2->formats[CSI2_PAD_SOURCE].code)
		csi2->dpcm_decompress = true;
	else
		csi2->dpcm_decompress = false;

	csi2->contexts[0].format_id = isp_csi2_ctx_map_format(csi2);

	if (csi2->video_out.bpl_padding == 0)
		csi2->contexts[0].data_offset = 0;
	else
		csi2->contexts[0].data_offset = csi2->video_out.bpl_value;

	/*
	 * Enable end-of-frame and end-of-line signal generation for
	 * context 0. The CSI2 receiver generates these signals to
	 * qualify the last pixel of a frame and the last pixel of a line.
	 * Without them the receiver writes data to memory beyond the
	 * buffer size and/or the data line offset is not handled correctly.
	 */
	csi2->contexts[0].eof_enabled = 1;
	csi2->contexts[0].eol_enabled = 1;

	isp_csi2_irq_complexio1_set(isp, csi2, 1);
	isp_csi2_irq_ctx_set(isp, csi2, 1);
	isp_csi2_irq_status_set(isp, csi2, 1);

	/* Set configuration (timings, format and links) */
	isp_csi2_timing_config(isp, csi2, timing);
	isp_csi2_recv_config(isp, csi2, &csi2->ctrl);
	isp_csi2_ctx_config(isp, csi2, &csi2->contexts[0]);

	return 0;
}
Example #3
0
/*
 * Validate a pipeline by checking both ends of all links for format
 * discrepancies.
 *
 * Compute the minimum time per frame value as the maximum of time per frame
 * limits reported by every block in the pipeline.
 *
 * Return 0 if all formats match, or -EPIPE if at least one link is found with
 * different formats on its two ends.
 */
static int isp_video_validate_pipeline(struct isp_pipeline *pipe)
{
	struct isp_device *isp = pipe->output->isp;
	struct v4l2_subdev_format fmt_source;
	struct v4l2_subdev_format fmt_sink;
	struct media_pad *pad;
	struct v4l2_subdev *subdev;
	int ret;

	pipe->max_rate = pipe->l3_ick;

	subdev = isp_video_remote_subdev(pipe->output, NULL);
	if (subdev == NULL)
		return -EPIPE;

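	/* walk the pipeline upstream from the output towards the sensor,
	 * checking the formats on both ends of every link on the way */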
	while (1) {
		unsigned int shifter_link;
		/* Retrieve the sink format */
		pad = &subdev->entity.pads[0];
		if (!(pad->flags & MEDIA_PAD_FL_SINK))
			break;

		fmt_sink.pad = pad->index;
		fmt_sink.which = V4L2_SUBDEV_FORMAT_ACTIVE;
		ret = v4l2_subdev_call(subdev, pad, get_fmt, NULL, &fmt_sink);
		if (ret < 0 && ret != -ENOIOCTLCMD)
			return -EPIPE;

		/* Update the maximum frame rate */
		if (subdev == &isp->isp_res.subdev)
			omap3isp_resizer_max_rate(&isp->isp_res,
						  &pipe->max_rate);

		/* Check ccdc maximum data rate when data comes from sensor
		 * TODO: Include ccdc rate in pipe->max_rate and compare the
		 *       total pipe rate with the input data rate from sensor.
		 */
		if (subdev == &isp->isp_ccdc.subdev && pipe->input == NULL) {
			unsigned int rate = UINT_MAX;

			omap3isp_ccdc_max_rate(&isp->isp_ccdc, &rate);
			if (isp->isp_ccdc.vpcfg.pixelclk > rate)
				return -ENOSPC;
		}

		/* If sink pad is on CCDC, the link has the lane shifter
		 * in the middle of it. */
		shifter_link = subdev == &isp->isp_ccdc.subdev;

		/* Retrieve the source format */
		pad = media_entity_remote_source(pad);
		if (pad == NULL ||
		    media_entity_type(pad->entity) != MEDIA_ENT_T_V4L2_SUBDEV)
			break;

		subdev = media_entity_to_v4l2_subdev(pad->entity);

		fmt_source.pad = pad->index;
		fmt_source.which = V4L2_SUBDEV_FORMAT_ACTIVE;
		ret = v4l2_subdev_call(subdev, pad, get_fmt, NULL, &fmt_source);
		if (ret < 0 && ret != -ENOIOCTLCMD)
			return -EPIPE;

		/* Check if the two ends match */
		if (fmt_source.format.width != fmt_sink.format.width ||
		    fmt_source.format.height != fmt_sink.format.height)
			return -EPIPE;

		if (shifter_link) {
			unsigned int parallel_shift = 0;
			if (isp->isp_ccdc.input == CCDC_INPUT_PARALLEL) {
				struct isp_parallel_platform_data *pdata =
					&((struct isp_v4l2_subdevs_group *)
					      subdev->host_priv)->bus.parallel;
				parallel_shift = pdata->data_lane_shift * 2;
			}
			if (!isp_video_is_shiftable(fmt_source.format.code,
						fmt_sink.format.code,
						parallel_shift))
				return -EPIPE;
		} else if (fmt_source.format.code != fmt_sink.format.code)
			return -EPIPE;
	}

	return 0;
}
Example #4
0
static int mxr_streamer_get(struct mxr_device *mdev, struct v4l2_subdev *sd)
{
	int i;
	int ret = 0;
	int local = 1;
	struct sub_mxr_device *sub_mxr;
	struct mxr_layer *layer;
	struct media_pad *pad;
	struct v4l2_mbus_framefmt mbus_fmt;
#if defined(CONFIG_CPU_EXYNOS4210)
	struct mxr_resources *res = &mdev->res;
#endif
	struct v4l2_control ctrl;

	mutex_lock(&mdev->s_mutex);
	++mdev->n_streamer;
	mxr_dbg(mdev, "%s(%d)\n", __func__, mdev->n_streamer);
	/* If the pipeline is started from the Gscaler input video device,
	 * the basic TV configuration must be set before running the mixer */

	if (mdev->mxr_data_from == FROM_GSC_SD) {
		mxr_dbg(mdev, "%s: from gscaler\n", __func__);
		local = 0;
		/* enable mixer clock */
		ret = mxr_power_get(mdev);
		if (ret < 0) {
			mxr_err(mdev, "power on failed for video layer\n");
			ret = -ENODEV;
			goto out;
		}

		for (i = 0; i < MXR_MAX_SUB_MIXERS; ++i) {
			sub_mxr = &mdev->sub_mxr[i];
			if (sub_mxr->local) {
				layer = sub_mxr->layer[MXR_LAYER_VIDEO];
				layer->pipe.state = MXR_PIPELINE_STREAMING;
				mxr_layer_geo_fix(layer);
				layer->ops.format_set(layer, layer->fmt,
							    &layer->geo);
				layer->ops.stream_set(layer, 1);
				local += sub_mxr->local;
			}
		}
		if (local == 2)
			mxr_layer_sync(mdev, MXR_ENABLE);

		/* Set the TVOUT register for the gsc-mixer local path */
		mxr_reg_local_path_set(mdev);
	}

	/* The alpha blending configuration can be changed at any time,
	 * even while streaming */
	mxr_set_alpha_blend(mdev);
	mxr_reg_set_color_range(mdev);
	mxr_reg_set_layer_prio(mdev);

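	/* run the output start-up sequence only for the first streamer of
	 * the current path setup (two streamers when both sub-mixers are local) */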
	if ((mdev->n_streamer == 1 && local == 1) ||
	    (mdev->n_streamer == 2 && local == 2)) {
#if defined(CONFIG_ARM_EXYNOS5410_BUS_DEVFREQ)
		pm_qos_add_request(&exynos5_tv_mif_qos, PM_QOS_BUS_THROUGHPUT, 800000);
		pm_qos_add_request(&exynos5_tv_int_qos, PM_QOS_DEVICE_THROUGHPUT, 400000);
#endif

		for (i = MXR_PAD_SOURCE_GSCALER; i < MXR_PADS_NUM; ++i) {
			pad = &sd->entity.pads[i];

			/* find sink pad of output via enabled link */
			pad = media_entity_remote_source(pad);
			if (pad)
				if (media_entity_type(pad->entity)
						== MEDIA_ENT_T_V4L2_SUBDEV)
					break;

			if (i == MXR_PAD_SOURCE_GRP1) {
				ret = -ENODEV;
				goto out;
			}
		}

		sd = media_entity_to_v4l2_subdev(pad->entity);

		mxr_dbg(mdev, "cookie of current output = (%d)\n",
			to_output(mdev)->cookie);

#if defined(CONFIG_CPU_EXYNOS4210)
		if (to_output(mdev)->cookie == 0)
			clk_set_parent(res->sclk_mixer, res->sclk_dac);
		else
			clk_set_parent(res->sclk_mixer, res->sclk_hdmi);
#endif
		mxr_reg_s_output(mdev, to_output(mdev)->cookie);

		ret = v4l2_subdev_call(sd, video, g_mbus_fmt, &mbus_fmt);
		if (ret) {
			mxr_err(mdev, "failed to get mbus_fmt for output %s\n",
					sd->name);
			goto out;
		}
		ctrl.id = V4L2_CID_TV_GET_DVI_MODE;
		ret = v4l2_subdev_call(sd, core, g_ctrl, &ctrl);
		if (ret) {
			mxr_err(mdev, "failed to get DVI or HDMI mode %s\n",
					sd->name);
			goto out;
		}

		mxr_reg_set_mbus_fmt(mdev, &mbus_fmt, ctrl.value);
		ret = v4l2_subdev_call(sd, video, s_mbus_fmt, &mbus_fmt);
		if (ret) {
			mxr_err(mdev, "failed to set mbus_fmt for output %s\n",
					sd->name);
			goto out;
		}
		mxr_reg_streamon(mdev);

		/* start hdmi */
		ctrl.id = V4L2_CID_TV_HDMI_STATUS;
		ret = v4l2_subdev_call(sd, core, g_ctrl, &ctrl);
		if (ret) {
			mxr_err(mdev, "failed to get output %s status for start\n",
					sd->name);
			goto out;
		}
		if (ctrl.value == (HDMI_STOP | HPD_HIGH)) {
			ret = v4l2_subdev_call(sd, core, s_power, 1);
			if (ret) {
				mxr_err(mdev, "failed to get power for output %s\n",
						sd->name);
				goto out;
			}
			ret = v4l2_subdev_call(sd, video, s_stream, 1);
			if (ret) {
				mxr_err(mdev, "starting stream failed for output %s\n",
						sd->name);
				goto out;
			}
		}

		ret = mxr_reg_wait4update(mdev);
		if (ret) {
			mxr_err(mdev, "failed to get vsync (%d) from output\n",
					ret);
			goto out;
		}
	}

out:
	mutex_unlock(&mdev->s_mutex);
	mxr_reg_dump(mdev);

	return ret;
}
Example #5
0
static int mxr_streamer_put(struct mxr_device *mdev, struct v4l2_subdev *sd)
{
	int i;
	int ret = 0;
	int local = 1;
	struct media_pad *pad;
	struct sub_mxr_device *sub_mxr;
	struct mxr_layer *layer;
	struct v4l2_subdev *hdmi_sd;
	struct v4l2_subdev *gsc_sd;
	struct exynos_entity_data *md_data;
	struct s5p_mxr_platdata *pdata = mdev->pdata;
	struct v4l2_control ctrl;

	mutex_lock(&mdev->s_mutex);
	--mdev->n_streamer;
	mxr_dbg(mdev, "%s(%d)\n", __func__, mdev->n_streamer);

	/* count the number of local paths in use */
	if (mdev->mxr_data_from == FROM_GSC_SD) {
		local = 0;
		for (i = 0; i < MXR_MAX_SUB_MIXERS; ++i) {
			sub_mxr = &mdev->sub_mxr[i];
			if (sub_mxr->local)
				local += sub_mxr->local;
		}
		if (local == 2)
			mxr_layer_sync(mdev, MXR_DISABLE);

		/* stop gscaler --> waiting for frame done */
		pad = &sd->entity.pads[MXR_PAD_SINK_GSCALER];
		pad = media_entity_remote_source(pad);
		if (pad) {
			gsc_sd = media_entity_to_v4l2_subdev(
					pad->entity);
			mxr_dbg(mdev, "stop from %s\n", gsc_sd->name);
			md_data = (struct exynos_entity_data *)
				gsc_sd->dev_priv;
			if (is_ip_ver_5g_1 || is_ip_ver_5a_0)
				md_data->media_ops->power_off(gsc_sd);
		}

		/* disable video layer */
		for (i = 0; i < MXR_MAX_SUB_MIXERS; ++i) {
			sub_mxr = &mdev->sub_mxr[i];
			if (sub_mxr->local) {
				layer = sub_mxr->layer[MXR_LAYER_VIDEO];
				layer->ops.stream_set(layer, 0);
				layer->pipe.state = MXR_PIPELINE_IDLE;
			}
		}
	}

	if ((mdev->n_streamer == 0 && local == 1) ||
	    (mdev->n_streamer == 1 && local == 2)) {
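		/* find the output subdev (e.g. HDMI) connected to a mixer
		 * source pad through an enabled link */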
		for (i = MXR_PAD_SOURCE_GSCALER; i < MXR_PADS_NUM; ++i) {
			pad = &sd->entity.pads[i];

			/* find sink pad of output via enabled link */
			pad = media_entity_remote_source(pad);
			if (pad)
				if (media_entity_type(pad->entity)
						== MEDIA_ENT_T_V4L2_SUBDEV)
					break;

			if (i == MXR_PAD_SOURCE_GRP1) {
				ret = -ENODEV;
				goto out;
			}
		}

		hdmi_sd = media_entity_to_v4l2_subdev(pad->entity);

		mxr_reg_streamoff(mdev);
		/* the Mixer setup takes effect at vsync */
		ret = mxr_reg_wait4update(mdev);
		if (ret) {
			mxr_err(mdev, "failed to get vsync (%d) from output\n",
					ret);
			goto out;
		}

		/* stop hdmi */
		ctrl.id = V4L2_CID_TV_HDMI_STATUS;
		ret = v4l2_subdev_call(hdmi_sd, core, g_ctrl, &ctrl);
		if (ret) {
			mxr_err(mdev, "failed to get output %s status for stop\n",
					hdmi_sd->name);
			goto out;
		}
		/*
		 * HDMI should be turned off only when it is not in use:
		 * 1. cable out
		 * 2. suspend (blank is called at suspend)
		 */
		if (ctrl.value == (HDMI_STREAMING | HPD_LOW) || mdev->blank) {
			ret = v4l2_subdev_call(hdmi_sd, video, s_stream, 0);
			if (ret) {
				mxr_err(mdev, "stopping stream failed for output %s\n",
						hdmi_sd->name);
				goto out;
			}
			ret = v4l2_subdev_call(hdmi_sd, core, s_power, 0);
			if (ret) {
				mxr_err(mdev, "failed to put power for output %s\n",
						hdmi_sd->name);
				goto out;
			}
			mdev->blank = 0;
		}
	}
	/* disable mixer clock */
	if (mdev->mxr_data_from == FROM_GSC_SD)
		mxr_power_put(mdev);

	WARN(mdev->n_streamer < 0, "negative number of streamers (%d)\n",
		mdev->n_streamer);

out:
#if defined(CONFIG_ARM_EXYNOS5410_BUS_DEVFREQ)
	if ((mdev->n_streamer == 0 && local == 1) ||
	    (mdev->n_streamer == 1 && local == 2)) {
		pm_qos_remove_request(&exynos5_tv_mif_qos);
		pm_qos_remove_request(&exynos5_tv_int_qos);
	}
#endif
	mutex_unlock(&mdev->s_mutex);
	mxr_reg_dump(mdev);

	return ret;
}
Example #6
0
/*
 * Validate a pipeline by checking both ends of all links for format
 * discrepancies.
 *
 * Compute the minimum time per frame value as the maximum of time per frame
 * limits reported by every block in the pipeline.
 *
 * Return 0 if all formats match, or -EPIPE if at least one link is found with
 * different formats on its two ends.
 */
static int isp_video_validate_pipeline(struct isp_pipeline *pipe)
{
	struct isp_device *isp = pipe->output->isp;
	struct v4l2_subdev_format fmt_source;
	struct v4l2_subdev_format fmt_sink;
	struct media_pad *pad;
	struct v4l2_subdev *subdev;
	int ret;

	pipe->max_rate = pipe->l3_ick;

	subdev = isp_video_remote_subdev(pipe->output, NULL);
	if (subdev == NULL)
		return -EPIPE;

	while (1) {
		/* Retrieve the sink format */
		pad = &subdev->entity.pads[0];
		if (!(pad->flags & MEDIA_PAD_FLAG_INPUT))
			break;

		fmt_sink.pad = pad->index;
		fmt_sink.which = V4L2_SUBDEV_FORMAT_ACTIVE;
		ret = v4l2_subdev_call(subdev, pad, get_fmt, NULL, &fmt_sink);
		if (ret < 0 && ret != -ENOIOCTLCMD)
			return -EPIPE;

		/* Update the maximum frame rate */
		if (subdev == &isp->isp_res.subdev)
			ispresizer_max_rate(&isp->isp_res, &pipe->max_rate);

		/* Check ccdc maximum data rate when data comes from sensor
		 * TODO: Include ccdc rate in pipe->max_rate and compare the
		 *       total pipe rate with the input data rate from sensor.
		 */
		if (subdev == &isp->isp_ccdc.subdev && pipe->input == NULL) {
			unsigned int rate = UINT_MAX;

			ispccdc_max_rate(&isp->isp_ccdc, &rate);
			if (isp->isp_ccdc.vpcfg.pixelclk > rate)
				return -ENOSPC;
		}

		/* Retrieve the source format */
		pad = media_entity_remote_source(pad);
		if (pad == NULL ||
		    media_entity_type(pad->entity) !=
				MEDIA_ENTITY_TYPE_V4L2_SUBDEV)
			break;

		subdev = media_entity_to_v4l2_subdev(pad->entity);

		fmt_source.pad = pad->index;
		fmt_source.which = V4L2_SUBDEV_FORMAT_ACTIVE;
		ret = v4l2_subdev_call(subdev, pad, get_fmt, NULL, &fmt_source);
		if (ret < 0 && ret != -ENOIOCTLCMD)
			return -EPIPE;

		/* Check if the two ends match */
		if (fmt_source.format.code != fmt_sink.format.code ||
		    fmt_source.format.width != fmt_sink.format.width ||
		    fmt_source.format.height != fmt_sink.format.height)
			return -EPIPE;
	}

	return 0;
}
Example #7
0
static int gsc_cap_link_validate(struct gsc_dev *gsc)
{
	struct gsc_capture_device *cap = &gsc->cap;
	struct v4l2_subdev_format sink_fmt, src_fmt;
	struct v4l2_subdev *sd;
	struct media_pad *pad;
	int ret;

	/* Get the source pad connected with gsc-video */
	pad = media_entity_remote_source(&cap->vd_pad);
	if (pad == NULL)
		return -EPIPE;
	/* Get the subdev of source pad */
	sd = media_entity_to_v4l2_subdev(pad->entity);

	while (1) {
		/* Find sink pad of the subdev */
		pad = &sd->entity.pads[0];
		if (!(pad->flags & MEDIA_PAD_FL_SINK))
			break;
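		/* the gsc capture subdev takes its sink format from the
		 * context's source frame instead of calling get_fmt */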
		if (sd == cap->sd_cap) {
			struct gsc_frame *gf = &cap->ctx->s_frame;
			sink_fmt.format.width = gf->crop.width;
			sink_fmt.format.height = gf->crop.height;
			sink_fmt.format.code = gf->fmt ? gf->fmt->mbus_code : 0;
		} else {
			sink_fmt.pad = pad->index;
			sink_fmt.which = V4L2_SUBDEV_FORMAT_ACTIVE;
			ret = v4l2_subdev_call(sd, pad, get_fmt, NULL, &sink_fmt);
			if (ret < 0 && ret != -ENOIOCTLCMD) {
				gsc_err("failed %s subdev get_fmt", sd->name);
				return -EPIPE;
			}
		}
		gsc_dbg("sink sd name : %s", sd->name);
		/* Get the source pad connected with remote sink pad */
		pad = media_entity_remote_source(pad);
		if (pad == NULL ||
		    media_entity_type(pad->entity) != MEDIA_ENT_T_V4L2_SUBDEV)
			break;

		/* Get the subdev of source pad */
		sd = media_entity_to_v4l2_subdev(pad->entity);
		gsc_dbg("source sd name : %s", sd->name);

		src_fmt.pad = pad->index;
		src_fmt.which = V4L2_SUBDEV_FORMAT_ACTIVE;
		ret = v4l2_subdev_call(sd, pad, get_fmt, NULL, &src_fmt);
		if (ret < 0 && ret != -ENOIOCTLCMD) {
			gsc_err("failed %s subdev get_fmt", sd->name);
			return -EPIPE;
		}

		gsc_dbg("src_width : %d, src_height : %d, src_code : %d",
			src_fmt.format.width, src_fmt.format.height,
			src_fmt.format.code);
		gsc_dbg("sink_width : %d, sink_height : %d, sink_code : %d",
			sink_fmt.format.width, sink_fmt.format.height,
			sink_fmt.format.code);

		if (src_fmt.format.width != sink_fmt.format.width ||
		    src_fmt.format.height != sink_fmt.format.height ||
		    src_fmt.format.code != sink_fmt.format.code) {
			gsc_err("mismatch sink and source");
			return -EPIPE;
		}
	}

	return 0;
}
Example #8
0
static int mxr_streamer_get(struct mxr_device *mdev, struct v4l2_subdev* sd)
{
	int i, ret;
	int local = 1;
	struct sub_mxr_device *sub_mxr;
	struct mxr_layer *layer;
	struct media_pad *pad;
	struct v4l2_mbus_framefmt mbus_fmt;
#if defined(CONFIG_CPU_EXYNOS4210)
	struct mxr_resources *res = &mdev->res;
#endif

	mutex_lock(&mdev->s_mutex);
	++mdev->n_streamer;
	mxr_dbg(mdev, "%s(%d)\n", __func__, mdev->n_streamer);
	/* If the pipeline is started from the Gscaler input video device,
	 * the basic TV configuration must be set before running the mixer */
	if (mdev->mxr_data_from == FROM_GSC_SD) {
		mxr_dbg(mdev, "%s: from gscaler\n", __func__);
		local = 0;
		/* enable mixer clock */
		ret = mxr_power_get(mdev);
		if (ret) {
			mxr_err(mdev, "power on failed\n");
			return -ENODEV;
		}
		/* turn on the output device connected to the mixer
		 * through the link */
		mxr_output_get(mdev);

		for (i = 0; i < MXR_MAX_SUB_MIXERS; ++i) {
			sub_mxr = &mdev->sub_mxr[i];
			if (sub_mxr->local) {
				layer = sub_mxr->layer[MXR_LAYER_VIDEO];
				layer->pipe.state = MXR_PIPELINE_STREAMING;
				mxr_layer_geo_fix(layer);
				layer->ops.format_set(layer);
				layer->ops.stream_set(layer, 1);
				local += sub_mxr->local;
			}
		}
		if (local == 2)
			mxr_layer_sync(mdev, MXR_ENABLE);

		/* Set the TVOUT register for the gsc-mixer local path */
		mxr_reg_local_path_set(mdev, mdev->mxr0_gsc, mdev->mxr1_gsc, mdev->flags);
	}

	/* The alpha blending configuration can be changed at any time,
	 * even while streaming */
	mxr_set_alpha_blend(mdev);
	mxr_reg_set_layer_prio(mdev);

	if ((mdev->n_streamer == 1 && local == 1) ||
	    (mdev->n_streamer == 2 && local == 2)) {
		for (i = MXR_PAD_SOURCE_GSCALER; i < MXR_PADS_NUM; ++i) {
			pad = &sd->entity.pads[i];

			/* find sink pad of output via enabled link */
			pad = media_entity_remote_source(pad);
			if (pad)
				if (media_entity_type(pad->entity)
						== MEDIA_ENT_T_V4L2_SUBDEV)
					break;

			if (i == MXR_PAD_SOURCE_GRP1)
				return -ENODEV;
		}

		sd = media_entity_to_v4l2_subdev(pad->entity);

		mxr_dbg(mdev, "cookie of current output = (%d)\n",
			to_output(mdev)->cookie);

#if defined(CONFIG_CPU_EXYNOS4210)
		if (to_output(mdev)->cookie == 0)
			clk_set_parent(res->sclk_mixer, res->sclk_dac);
		else
			clk_set_parent(res->sclk_mixer, res->sclk_hdmi);
#endif
		mxr_reg_s_output(mdev, to_output(mdev)->cookie);

		ret = v4l2_subdev_call(sd, video, g_mbus_fmt, &mbus_fmt);
		if (ret) {
			mxr_err(mdev, "failed to get mbus_fmt for output %s\n",
					sd->name);
			return ret;
		}

		mxr_reg_set_mbus_fmt(mdev, &mbus_fmt);
		ret = v4l2_subdev_call(sd, video, s_mbus_fmt, &mbus_fmt);
		if (ret) {
			mxr_err(mdev, "failed to set mbus_fmt for output %s\n",
					sd->name);
			return ret;
		}
		mxr_reg_streamon(mdev);

		ret = v4l2_subdev_call(sd, video, s_stream, 1);
		if (ret) {
			mxr_err(mdev, "starting stream failed for output %s\n",
					sd->name);
			return ret;
		}

		ret = mxr_reg_wait4vsync(mdev);
		if (ret) {
			mxr_err(mdev, "failed to get vsync (%d) from output\n",
					ret);
			return ret;
		}
	}

	mutex_unlock(&mdev->s_mutex);
	mxr_reg_dump(mdev);

	return 0;
}
Example #9
0
static int mxr_streamer_put(struct mxr_device *mdev, struct v4l2_subdev *sd)
{
	int ret, i;
	int local = 1;
	struct media_pad *pad;
	struct sub_mxr_device *sub_mxr;
	struct mxr_layer *layer;
	struct v4l2_subdev *hdmi_sd;
	struct v4l2_subdev *gsc_sd;
	struct exynos_entity_data *md_data;

	mutex_lock(&mdev->s_mutex);
	--mdev->n_streamer;
	mxr_dbg(mdev, "%s(%d)\n", __func__, mdev->n_streamer);

	/* count the number of local paths in use */
	if (mdev->mxr_data_from == FROM_GSC_SD) {
		local = 0;
		for (i = 0; i < MXR_MAX_SUB_MIXERS; ++i) {
			sub_mxr = &mdev->sub_mxr[i];
			if (sub_mxr->local) {
				local += sub_mxr->local;
			}
		}
		if (local == 2)
			mxr_layer_sync(mdev, MXR_DISABLE);
	}

	if ((mdev->n_streamer == 0 && local == 1) ||
	    (mdev->n_streamer == 1 && local == 2)) {
		for (i = MXR_PAD_SOURCE_GSCALER; i < MXR_PADS_NUM; ++i) {
			pad = &sd->entity.pads[i];

			/* find sink pad of output via enabled link */
			pad = media_entity_remote_source(pad);
			if (pad)
				if (media_entity_type(pad->entity)
						== MEDIA_ENT_T_V4L2_SUBDEV)
					break;

			if (i == MXR_PAD_SOURCE_GRP1)
				return -ENODEV;
		}

		hdmi_sd = media_entity_to_v4l2_subdev(pad->entity);

		mxr_reg_streamoff(mdev);
		/* the Mixer setup takes effect at vsync */
		ret = mxr_reg_wait4vsync(mdev);
		if (ret) {
			mxr_err(mdev, "failed to get vsync (%d) from output\n",
					ret);
			return ret;
		}
	}
	/* When the local path between gscaler and mixer is used, the stop
	 * sequence below must be executed */
	if (mdev->mxr_data_from == FROM_GSC_SD) {
		pad = &sd->entity.pads[MXR_PAD_SINK_GSCALER];
		pad = media_entity_remote_source(pad);
		if (pad) {
			gsc_sd = media_entity_to_v4l2_subdev(
					pad->entity);
			mxr_dbg(mdev, "stop from %s\n", gsc_sd->name);
			md_data = (struct exynos_entity_data *)
				gsc_sd->dev_priv;
			md_data->media_ops->power_off(gsc_sd);
		}
	}

	if ((mdev->n_streamer == 0 && local == 1) ||
	    (mdev->n_streamer == 1 && local == 2)) {
		ret = v4l2_subdev_call(hdmi_sd, video, s_stream, 0);
		if (ret) {
			mxr_err(mdev, "stopping stream failed for output %s\n",
					hdmi_sd->name);
			return ret;
		}
	}
	/* turn off the output device connected to the mixer
	 * through the link */
	if (mdev->mxr_data_from == FROM_GSC_SD) {
		for (i = 0; i < MXR_MAX_SUB_MIXERS; ++i) {
			sub_mxr = &mdev->sub_mxr[i];
			if (sub_mxr->local) {
				layer = sub_mxr->layer[MXR_LAYER_VIDEO];
				layer->ops.stream_set(layer, 0);
				layer->pipe.state = MXR_PIPELINE_IDLE;
			}
		}
		mxr_reg_local_path_clear(mdev);
		mxr_output_put(mdev);

		/* disable mixer clock */
		mxr_power_put(mdev);
	}
	WARN(mdev->n_streamer < 0, "negative number of streamers (%d)\n",
		mdev->n_streamer);
	mutex_unlock(&mdev->s_mutex);
	mxr_reg_dump(mdev);

	return 0;
}