Example #1
bool vivid_vid_can_loop(struct vivid_dev *dev)
{
	if (dev->src_rect.width != dev->sink_rect.width ||
	    dev->src_rect.height != dev->sink_rect.height)
		return false;
	if (dev->fmt_cap->fourcc != dev->fmt_out->fourcc)
		return false;
	if (dev->field_cap != dev->field_out)
		return false;
	/*
	 * While this can be supported, it is just too much work
	 * to actually implement.
	 */
	if (dev->field_cap == V4L2_FIELD_SEQ_TB ||
	    dev->field_cap == V4L2_FIELD_SEQ_BT)
		return false;
	if (vivid_is_svid_cap(dev) && vivid_is_svid_out(dev)) {
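		/*
		 * The capture and output standards must both be 60 Hz (525
		 * lines) or both be 50 Hz; the '!=' of the two negated tests
		 * below acts as a logical XOR on the V4L2_STD_525_60 flags.
		 */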
		if (!(dev->std_cap & V4L2_STD_525_60) !=
		    !(dev->std_out & V4L2_STD_525_60))
			return false;
		return true;
	}
	if (vivid_is_hdmi_cap(dev) && vivid_is_hdmi_out(dev))
		return true;
	return false;
}
Example #2
int vidioc_dv_timings_cap(struct file *file, void *_fh,
				    struct v4l2_dv_timings_cap *cap)
{
	struct vivid_dev *dev = video_drvdata(file);
	struct video_device *vdev = video_devdata(file);

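	/*
	 * DV timings are only supported for the HDMI input (capture,
	 * VFL_DIR_RX) and the HDMI output; for any other input or output
	 * there is no DV timings data to report.
	 */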
	if (vdev->vfl_dir == VFL_DIR_RX) {
		if (!vivid_is_hdmi_cap(dev))
			return -ENODATA;
	} else {
		if (!vivid_is_hdmi_out(dev))
			return -ENODATA;
	}
	*cap = vivid_dv_timings_cap;
	return 0;
}
Example #3
int vidioc_enum_dv_timings(struct file *file, void *_fh,
				    struct v4l2_enum_dv_timings *timings)
{
	struct vivid_dev *dev = video_drvdata(file);
	struct video_device *vdev = video_devdata(file);

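	/* Only the HDMI input (VFL_DIR_RX) and HDMI output support DV timings. */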
	if (vdev->vfl_dir == VFL_DIR_RX) {
		if (!vivid_is_hdmi_cap(dev))
			return -ENODATA;
	} else {
		if (!vivid_is_hdmi_out(dev))
			return -ENODATA;
	}
	return v4l2_enum_dv_timings_cap(timings, &vivid_dv_timings_cap,
			NULL, NULL);
}
Example #4
bool vivid_vid_can_loop(struct vivid_dev *dev)
{
	if (dev->src_rect.width != dev->sink_rect.width ||
	    dev->src_rect.height != dev->sink_rect.height)
		return false;
	if (dev->fmt_cap->fourcc != dev->fmt_out->fourcc)
		return false;
	if (dev->field_cap != dev->field_out)
		return false;
	if (vivid_is_svid_cap(dev) && vivid_is_svid_out(dev)) {
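		/*
		 * The capture and output standards must both be 60 Hz (525
		 * lines) or both be 50 Hz; the '!=' of the two negated tests
		 * below acts as a logical XOR on the V4L2_STD_525_60 flags.
		 */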
		if (!(dev->std_cap & V4L2_STD_525_60) !=
		    !(dev->std_out & V4L2_STD_525_60))
			return false;
		return true;
	}
	if (vivid_is_hdmi_cap(dev) && vivid_is_hdmi_out(dev))
		return true;
	return false;
}
Example #5
static void vivid_fillbuff(struct vivid_dev *dev, struct vivid_buffer *buf)
{
	unsigned factor = V4L2_FIELD_HAS_T_OR_B(dev->field_cap) ? 2 : 1;
	unsigned line_height = 16 / factor;
	bool is_tv = vivid_is_sdtv_cap(dev);
	bool is_60hz = is_tv && (dev->std_cap & V4L2_STD_525_60);
	unsigned p;
	int line = 1;
	u8 *basep[TPG_MAX_PLANES][2];
	unsigned ms;
	char str[100];
	s32 gain;
	bool is_loop = false;

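	/*
	 * Loop the video output into the capture buffer only when looping is
	 * both enabled and possible, and the simulated S-Video or HDMI input
	 * signal is not flagged as invalid.
	 */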
	if (dev->loop_video && dev->can_loop_video &&
	    ((vivid_is_svid_cap(dev) && !VIVID_INVALID_SIGNAL(dev->std_signal_mode)) ||
	     (vivid_is_hdmi_cap(dev) && !VIVID_INVALID_SIGNAL(dev->dv_timings_signal_mode))))
		is_loop = true;

	buf->vb.v4l2_buf.sequence = dev->vid_cap_seq_count;
	/*
	 * Take the timestamp now if the timestamp source is set to
	 * "Start of Exposure".
	 */
	if (dev->tstamp_src_is_soe)
		v4l2_get_timestamp(&buf->vb.v4l2_buf.timestamp);
	if (dev->field_cap == V4L2_FIELD_ALTERNATE) {
		/*
		 * 60 Hz standards start with the bottom field, 50 Hz standards
		 * with the top field. So if the 0-based seq_count is even,
		 * then the field is TOP for 50 Hz and BOTTOM for 60 Hz
		 * standards.
		 */
		buf->vb.v4l2_buf.field = ((dev->vid_cap_seq_count & 1) ^ is_60hz) ?
			V4L2_FIELD_TOP : V4L2_FIELD_BOTTOM;
		/*
		 * The sequence counter counts frames, not fields. So divide
		 * by two.
		 */
		buf->vb.v4l2_buf.sequence /= 2;
	} else {
		buf->vb.v4l2_buf.field = dev->field_cap;
	}
	tpg_s_field(&dev->tpg, buf->vb.v4l2_buf.field);
	tpg_s_perc_fill_blank(&dev->tpg, dev->must_blank[buf->vb.v4l2_buf.index]);

	vivid_precalc_copy_rects(dev);

	for (p = 0; p < tpg_g_planes(&dev->tpg); p++) {
		void *vbuf = vb2_plane_vaddr(&buf->vb, p);

		/*
		 * The first plane of a multiplanar format has a non-zero
		 * data_offset. This helps testing whether the application
		 * correctly supports non-zero data offsets.
		 */
		if (dev->fmt_cap->data_offset[p]) {
			memset(vbuf, dev->fmt_cap->data_offset[p] & 0xff,
			       dev->fmt_cap->data_offset[p]);
			vbuf += dev->fmt_cap->data_offset[p];
		}
		tpg_calc_text_basep(&dev->tpg, basep, p, vbuf);
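		/*
		 * Fill the plane with the test pattern generator unless we
		 * are looping and vivid_copy_buffer() returned 0 (i.e. the
		 * plane was copied from the output buffer).
		 */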
		if (!is_loop || vivid_copy_buffer(dev, p, vbuf, buf))
			tpg_fillbuffer(&dev->tpg, vivid_get_std_cap(dev), p, vbuf);
	}
	dev->must_blank[buf->vb.v4l2_buf.index] = false;

	/* Update the stream time; only update it at the start of a new frame. */
	if (dev->field_cap != V4L2_FIELD_ALTERNATE || (buf->vb.v4l2_buf.sequence & 1) == 0)
		dev->ms_vid_cap = jiffies_to_msecs(jiffies - dev->jiffies_vid_cap);

	ms = dev->ms_vid_cap;
	if (dev->osd_mode <= 1) {
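		/*
		 * OSD modes 0 and 1 overlay the elapsed stream time as
		 * HH:MM:SS:mmm, followed by the sequence counter and, for
		 * V4L2_FIELD_ALTERNATE, the current field.
		 */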
		snprintf(str, sizeof(str), " %02d:%02d:%02d:%03d %u%s",
				(ms / (60 * 60 * 1000)) % 24,
				(ms / (60 * 1000)) % 60,
				(ms / 1000) % 60,
				ms % 1000,
				buf->vb.v4l2_buf.sequence,
				(dev->field_cap == V4L2_FIELD_ALTERNATE) ?
					(buf->vb.v4l2_buf.field == V4L2_FIELD_TOP ?
					 " top" : " bottom") : "");
		tpg_gen_text(&dev->tpg, basep, line++ * line_height, 16, str);
	}
	if (dev->osd_mode == 0) {
		snprintf(str, sizeof(str), " %dx%d, input %d ",
				dev->src_rect.width, dev->src_rect.height, dev->input);
		tpg_gen_text(&dev->tpg, basep, line++ * line_height, 16, str);

		gain = v4l2_ctrl_g_ctrl(dev->gain);
		mutex_lock(dev->ctrl_hdl_user_vid.lock);
		snprintf(str, sizeof(str),
			" brightness %3d, contrast %3d, saturation %3d, hue %d ",
			dev->brightness->cur.val,
			dev->contrast->cur.val,
			dev->saturation->cur.val,
			dev->hue->cur.val);
		tpg_gen_text(&dev->tpg, basep, line++ * line_height, 16, str);
		snprintf(str, sizeof(str),
			" autogain %d, gain %3d, alpha 0x%02x ",
			dev->autogain->cur.val, gain, dev->alpha->cur.val);
		mutex_unlock(dev->ctrl_hdl_user_vid.lock);
		tpg_gen_text(&dev->tpg, basep, line++ * line_height, 16, str);
		mutex_lock(dev->ctrl_hdl_user_aud.lock);
		snprintf(str, sizeof(str),
			" volume %3d, mute %d ",
			dev->volume->cur.val, dev->mute->cur.val);
		mutex_unlock(dev->ctrl_hdl_user_aud.lock);
		tpg_gen_text(&dev->tpg, basep, line++ * line_height, 16, str);
		mutex_lock(dev->ctrl_hdl_user_gen.lock);
		snprintf(str, sizeof(str), " int32 %d, int64 %lld, bitmask %08x ",
			dev->int32->cur.val,
			*dev->int64->p_cur.p_s64,
			dev->bitmask->cur.val);
		tpg_gen_text(&dev->tpg, basep, line++ * line_height, 16, str);
		snprintf(str, sizeof(str), " boolean %d, menu %s, string \"%s\" ",
			dev->boolean->cur.val,
			dev->menu->qmenu[dev->menu->cur.val],
			dev->string->p_cur.p_char);
		tpg_gen_text(&dev->tpg, basep, line++ * line_height, 16, str);
		snprintf(str, sizeof(str), " integer_menu %lld, value %d ",
			dev->int_menu->qmenu_int[dev->int_menu->cur.val],
			dev->int_menu->cur.val);
		mutex_unlock(dev->ctrl_hdl_user_gen.lock);
		tpg_gen_text(&dev->tpg, basep, line++ * line_height, 16, str);
		if (dev->button_pressed) {
			dev->button_pressed--;
			snprintf(str, sizeof(str), " button pressed!");
			tpg_gen_text(&dev->tpg, basep, line++ * line_height, 16, str);
		}
	}

	/*
	 * If "End of Frame" is specified at the timestamp source, then take
	 * the timestamp now.
	 */
	if (!dev->tstamp_src_is_soe)
		v4l2_get_timestamp(&buf->vb.v4l2_buf.timestamp);
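	/* Add the configured time_wrap_offset to the timestamp seconds. */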
	buf->vb.v4l2_buf.timestamp.tv_sec += dev->time_wrap_offset;
}
Example #6
static int vivid_vid_cap_s_ctrl(struct v4l2_ctrl *ctrl)
{
	static const u32 colorspaces[] = {
		V4L2_COLORSPACE_SMPTE170M,
		V4L2_COLORSPACE_REC709,
		V4L2_COLORSPACE_SRGB,
		V4L2_COLORSPACE_ADOBERGB,
		V4L2_COLORSPACE_BT2020,
		V4L2_COLORSPACE_SMPTE240M,
		V4L2_COLORSPACE_470_SYSTEM_M,
		V4L2_COLORSPACE_470_SYSTEM_BG,
	};
	struct vivid_dev *dev = container_of(ctrl->handler, struct vivid_dev, ctrl_hdl_vid_cap);
	unsigned i;

	switch (ctrl->id) {
	case VIVID_CID_TEST_PATTERN:
		vivid_update_quality(dev);
		tpg_s_pattern(&dev->tpg, ctrl->val);
		break;
	case VIVID_CID_COLORSPACE:
		tpg_s_colorspace(&dev->tpg, colorspaces[ctrl->val]);
		vivid_send_source_change(dev, TV);
		vivid_send_source_change(dev, SVID);
		vivid_send_source_change(dev, HDMI);
		vivid_send_source_change(dev, WEBCAM);
		break;
	case VIVID_CID_YCBCR_ENC:
		tpg_s_ycbcr_enc(&dev->tpg, ctrl->val);
		vivid_send_source_change(dev, TV);
		vivid_send_source_change(dev, SVID);
		vivid_send_source_change(dev, HDMI);
		vivid_send_source_change(dev, WEBCAM);
		break;
	case VIVID_CID_QUANTIZATION:
		tpg_s_quantization(&dev->tpg, ctrl->val);
		vivid_send_source_change(dev, TV);
		vivid_send_source_change(dev, SVID);
		vivid_send_source_change(dev, HDMI);
		vivid_send_source_change(dev, WEBCAM);
		break;
	case V4L2_CID_DV_RX_RGB_RANGE:
		if (!vivid_is_hdmi_cap(dev))
			break;
		tpg_s_rgb_range(&dev->tpg, ctrl->val);
		break;
	case VIVID_CID_LIMITED_RGB_RANGE:
		tpg_s_real_rgb_range(&dev->tpg, ctrl->val ?
				V4L2_DV_RGB_RANGE_LIMITED : V4L2_DV_RGB_RANGE_FULL);
		break;
	case VIVID_CID_ALPHA_MODE:
		tpg_s_alpha_mode(&dev->tpg, ctrl->val);
		break;
	case VIVID_CID_HOR_MOVEMENT:
		tpg_s_mv_hor_mode(&dev->tpg, ctrl->val);
		break;
	case VIVID_CID_VERT_MOVEMENT:
		tpg_s_mv_vert_mode(&dev->tpg, ctrl->val);
		break;
	case VIVID_CID_OSD_TEXT_MODE:
		dev->osd_mode = ctrl->val;
		break;
	case VIVID_CID_PERCENTAGE_FILL:
		tpg_s_perc_fill(&dev->tpg, ctrl->val);
		for (i = 0; i < VIDEO_MAX_FRAME; i++)
			dev->must_blank[i] = ctrl->val < 100;
		break;
	case VIVID_CID_INSERT_SAV:
		tpg_s_insert_sav(&dev->tpg, ctrl->val);
		break;
	case VIVID_CID_INSERT_EAV:
		tpg_s_insert_eav(&dev->tpg, ctrl->val);
		break;
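	/*
	 * The flip applied by the TPG is the simulated sensor flip XORed
	 * with the current dev->hflip/dev->vflip state.
	 */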
	case VIVID_CID_HFLIP:
		dev->sensor_hflip = ctrl->val;
		tpg_s_hflip(&dev->tpg, dev->sensor_hflip ^ dev->hflip);
		break;
	case VIVID_CID_VFLIP:
		dev->sensor_vflip = ctrl->val;
		tpg_s_vflip(&dev->tpg, dev->sensor_vflip ^ dev->vflip);
		break;
	case VIVID_CID_HAS_CROP_CAP:
		dev->has_crop_cap = ctrl->val;
		vivid_update_format_cap(dev, true);
		break;
	case VIVID_CID_HAS_COMPOSE_CAP:
		dev->has_compose_cap = ctrl->val;
		vivid_update_format_cap(dev, true);
		break;
	case VIVID_CID_HAS_SCALER_CAP:
		dev->has_scaler_cap = ctrl->val;
		vivid_update_format_cap(dev, true);
		break;
	case VIVID_CID_SHOW_BORDER:
		tpg_s_show_border(&dev->tpg, ctrl->val);
		break;
	case VIVID_CID_SHOW_SQUARE:
		tpg_s_show_square(&dev->tpg, ctrl->val);
		break;
	case VIVID_CID_STD_ASPECT_RATIO:
		dev->std_aspect_ratio = ctrl->val;
		tpg_s_video_aspect(&dev->tpg, vivid_get_video_aspect(dev));
		break;
	case VIVID_CID_DV_TIMINGS_SIGNAL_MODE:
		dev->dv_timings_signal_mode = dev->ctrl_dv_timings_signal_mode->val;
		if (dev->dv_timings_signal_mode == SELECTED_DV_TIMINGS)
			dev->query_dv_timings = dev->ctrl_dv_timings->val;
		v4l2_ctrl_activate(dev->ctrl_dv_timings,
				dev->dv_timings_signal_mode == SELECTED_DV_TIMINGS);
		vivid_update_quality(dev);
		vivid_send_source_change(dev, HDMI);
		break;
	case VIVID_CID_DV_TIMINGS_ASPECT_RATIO:
		dev->dv_timings_aspect_ratio = ctrl->val;
		tpg_s_video_aspect(&dev->tpg, vivid_get_video_aspect(dev));
		break;
	case VIVID_CID_TSTAMP_SRC:
		dev->tstamp_src_is_soe = ctrl->val;
		dev->vb_vid_cap_q.timestamp_flags &= ~V4L2_BUF_FLAG_TSTAMP_SRC_MASK;
		if (dev->tstamp_src_is_soe)
			dev->vb_vid_cap_q.timestamp_flags |= V4L2_BUF_FLAG_TSTAMP_SRC_SOE;
		break;
	case VIVID_CID_MAX_EDID_BLOCKS:
		dev->edid_max_blocks = ctrl->val;
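		/* Clamp the currently set number of EDID blocks to the new maximum. */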
		if (dev->edid_blocks > dev->edid_max_blocks)
			dev->edid_blocks = dev->edid_max_blocks;
		break;
	}
	return 0;
}