static int vpif_s_std(struct file *file, void *priv, v4l2_std_id *std_id)
{
	struct vpif_fh *fh = priv;
	struct channel_obj *ch = fh->channel;
	struct common_obj *common = &ch->common[VPIF_VIDEO_INDEX];
	int ret = 0;

	if (!(*std_id & DM646X_V4L2_STD))
		return -EINVAL;

	if (common->started) {
		vpif_err("streaming in progress\n");
		return -EBUSY;
	}

	/* Call encoder subdevice function to set the standard */
	if (mutex_lock_interruptible(&common->lock))
		return -ERESTARTSYS;

	ch->video.stdid = *std_id;
	/* Get the information about the standard */
	if (vpif_get_std_info(ch)) {
		vpif_err("Error getting the standard info\n");
		ret = -EINVAL;
		goto s_std_exit;
	}

	if ((ch->vpifparams.std_info.width *
		ch->vpifparams.std_info.height * 2) >
		config_params.channel_bufsize[ch->channel_id]) {
		vpif_err("invalid std for this size\n");
		ret = -EINVAL;
		goto s_std_exit;
	}

	common->fmt.fmt.pix.bytesperline = common->fmt.fmt.pix.width;
	/* Configure the default format information */
	vpif_config_format(ch);

	ret = v4l2_device_call_until_err(&vpif_obj.v4l2_dev, 1, video,
						s_std_output, *std_id);
	if (ret < 0) {
		vpif_err("Failed to set output standard\n");
		goto s_std_exit;
	}

	ret = v4l2_device_call_until_err(&vpif_obj.v4l2_dev, 1, core,
							s_std, *std_id);
	if (ret < 0)
		vpif_err("Failed to set standard for sub devices\n");

s_std_exit:
	mutex_unlock(&common->lock);
	return ret;
}
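The vpif_s_std() handler above (and the variant in the next example) services the VIDIOC_S_STD ioctl. As a hedged illustration, here is a minimal userspace sketch that reaches such a handler; the device node path is a placeholder and the chosen standard must fall within the driver's DM646X_V4L2_STD mask:

#include <fcntl.h>
#include <stdio.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

int main(void)
{
	v4l2_std_id std = V4L2_STD_NTSC;	/* must be covered by DM646X_V4L2_STD */
	int fd = open("/dev/video2", O_RDWR);	/* placeholder device node */

	if (fd < 0) {
		perror("open");
		return 1;
	}
	/* The handler returns -EBUSY if streaming has already started. */
	if (ioctl(fd, VIDIOC_S_STD, &std) < 0)
		perror("VIDIOC_S_STD");
	close(fd);
	return 0;
}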
Example #2
static int vpif_s_std(struct file *file, void *priv, v4l2_std_id *std_id)
{
	struct vpif_fh *fh = priv;
	struct channel_obj *ch = fh->channel;
	struct common_obj *common = &ch->common[VPIF_VIDEO_INDEX];
	int ret = 0;

	if (!(*std_id & DM646X_V4L2_STD))
		return -EINVAL;

	if (common->started) {
		vpif_err("streaming in progress\n");
		return -EBUSY;
	}

	/* Call encoder subdevice function to set the standard */
	ch->video.stdid = *std_id;
	ch->video.dv_preset = V4L2_DV_INVALID;
	memset(&ch->video.bt_timings, 0, sizeof(ch->video.bt_timings));

	/* Get the information about the standard */
	if (vpif_update_resolution(ch))
		return -EINVAL;

	if ((ch->vpifparams.std_info.width *
		ch->vpifparams.std_info.height * 2) >
		config_params.channel_bufsize[ch->channel_id]) {
		vpif_err("invalid std for this size\n");
		return -EINVAL;
	}

	common->fmt.fmt.pix.bytesperline = common->fmt.fmt.pix.width;
	/* Configure the default format information */
	vpif_config_format(ch);

	ret = v4l2_device_call_until_err(&vpif_obj.v4l2_dev, 1, video,
						s_std_output, *std_id);
	if (ret < 0) {
		vpif_err("Failed to set output standard\n");
		return ret;
	}

	ret = v4l2_device_call_until_err(&vpif_obj.v4l2_dev, 1, core,
							s_std, *std_id);
	if (ret < 0)
		vpif_err("Failed to set standard for sub devices\n");
	return ret;
}
static int vpif_s_output(struct file *file, void *priv, unsigned int i)
{
	struct vpif_fh *fh = priv;
	struct channel_obj *ch = fh->channel;
	struct video_obj *vid_ch = &ch->video;
	struct common_obj *common = &ch->common[VPIF_VIDEO_INDEX];
	int ret = 0;

	if (mutex_lock_interruptible(&common->lock))
		return -ERESTARTSYS;

	if (common->started) {
		vpif_err("Streaming in progress\n");
		ret = -EBUSY;
		goto s_output_exit;
	}

	ret = v4l2_device_call_until_err(&vpif_obj.v4l2_dev, 1, video,
							s_routing, 0, i, 0);

	if (ret < 0) {
		vpif_err("Failed to set output\n");
		goto s_output_exit;
	}

	vid_ch->output_id = i;

s_output_exit:
	mutex_unlock(&common->lock);
	return ret;
}
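vpif_s_output() backs VIDIOC_S_OUTPUT. Continuing the previous userspace sketch (same fd, plus <string.h> for memset()), a hedged fragment that enumerates the outputs and selects the first one:

	struct v4l2_output out;
	int index = 0;				/* pick the first output */

	memset(&out, 0, sizeof(out));
	for (out.index = 0; ioctl(fd, VIDIOC_ENUMOUTPUT, &out) == 0; out.index++)
		printf("output %u: %s\n", out.index, (char *)out.name);

	if (ioctl(fd, VIDIOC_S_OUTPUT, &index) < 0)
		perror("VIDIOC_S_OUTPUT");	/* e.g. -EBUSY while streaming */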
Example #4
/*
 * vpif_g_chip_ident() - Identify the chip
 * @file: file ptr
 * @priv: file handle
 * @chip: chip identity
 *
 * Returns zero or -EINVAL if the read operation fails.
 */
static int vpif_g_chip_ident(struct file *file, void *priv,
		struct v4l2_dbg_chip_ident *chip)
{
	chip->ident = V4L2_IDENT_NONE;
	chip->revision = 0;
	if (chip->match.type != V4L2_CHIP_MATCH_I2C_DRIVER &&
			chip->match.type != V4L2_CHIP_MATCH_I2C_ADDR) {
		vpif_dbg(2, debug, "match_type is invalid.\n");
		return -EINVAL;
	}

	return v4l2_device_call_until_err(&vpif_obj.v4l2_dev, 0, core,
			g_chip_ident, chip);
}
static int vidioc_g_chip_ident(struct file *file, void *__fh,
		struct v4l2_dbg_chip_ident *chip)
{
	struct saa7146_fh *fh = __fh;
	struct saa7146_dev *dev = fh->dev;

	chip->ident = V4L2_IDENT_NONE;
	chip->revision = 0;
	if (chip->match.type == V4L2_CHIP_MATCH_HOST && !chip->match.addr) {
		chip->ident = V4L2_IDENT_SAA7146;
		return 0;
	}
	return v4l2_device_call_until_err(&dev->v4l2_dev, 0,
			core, g_chip_ident, chip);
}
Example #6
static int vidioc_g_chip_ident(struct file *file, void *__fh,
		struct v4l2_dbg_chip_ident *chip)
{
	struct saa7146_fh *fh = __fh;
	struct saa7146_dev *dev = fh->dev;

	chip->ident = V4L2_IDENT_NONE;
	chip->revision = 0;
	if (chip->match.type == V4L2_CHIP_MATCH_HOST) {
		if (v4l2_chip_match_host(&chip->match))
			chip->ident = V4L2_IDENT_SAA7146;
		return 0;
	}
	if (chip->match.type != V4L2_CHIP_MATCH_I2C_DRIVER &&
	    chip->match.type != V4L2_CHIP_MATCH_I2C_ADDR)
		return -EINVAL;
	return v4l2_device_call_until_err(&dev->v4l2_dev, 0,
			core, g_chip_ident, chip);
}
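All three g_chip_ident handlers above are reached through the VIDIOC_DBG_G_CHIP_IDENT debug ioctl, which kernels of this era still exposed (the API was removed later). A minimal sketch, assuming a placeholder device node:

#include <fcntl.h>
#include <stdio.h>
#include <string.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

int main(void)
{
	struct v4l2_dbg_chip_ident chip;
	int fd = open("/dev/video0", O_RDWR);	/* placeholder device node */

	if (fd < 0) {
		perror("open");
		return 1;
	}
	memset(&chip, 0, sizeof(chip));
	chip.match.type = V4L2_CHIP_MATCH_HOST;	/* query the bridge itself */
	if (ioctl(fd, VIDIOC_DBG_G_CHIP_IDENT, &chip) == 0)
		printf("ident %u revision %u\n", chip.ident, chip.revision);
	else
		perror("VIDIOC_DBG_G_CHIP_IDENT");
	close(fd);
	return 0;
}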
Example #7
/*
 * The iterative approach common to both scaling and cropping is:
 * 1. try whether the client can produce exactly what the user requested
 * 2. if (1) failed, keep doubling the client image until it is big enough
 * 3. if (2) failed, request the maximum image
 * (a standalone sketch of this doubling loop appears after the function below)
 */
int soc_camera_client_s_selection(struct v4l2_subdev *sd,
			struct v4l2_selection *sel, struct v4l2_selection *cam_sel,
			struct v4l2_rect *target_rect, struct v4l2_rect *subrect)
{
	struct v4l2_subdev_selection sdsel = {
		.which = V4L2_SUBDEV_FORMAT_ACTIVE,
		.target = sel->target,
		.flags = sel->flags,
		.r = sel->r,
	};
	struct v4l2_subdev_selection bounds = {
		.which = V4L2_SUBDEV_FORMAT_ACTIVE,
		.target = V4L2_SEL_TGT_CROP_BOUNDS,
	};
	struct v4l2_rect *rect = &sel->r, *cam_rect = &cam_sel->r;
	struct device *dev = sd->v4l2_dev->dev;
	int ret;
	unsigned int width, height;

	v4l2_subdev_call(sd, pad, set_selection, NULL, &sdsel);
	sel->r = sdsel.r;
	ret = soc_camera_client_g_rect(sd, cam_rect);
	if (ret < 0)
		return ret;

	/*
	 * Now cam_rect contains the current camera input rectangle, and it must
	 * be within camera cropcap bounds
	 */
	if (!memcmp(rect, cam_rect, sizeof(*rect))) {
		/* Even if camera S_SELECTION failed, the camera rectangle matches the request */
		dev_dbg(dev, "Camera S_SELECTION successful for %dx%d@%d:%d\n",
			rect->width, rect->height, rect->left, rect->top);
		*target_rect = *cam_rect;
		return 0;
	}

	/* Try to fix the cropping that the camera hasn't managed to set */
	dev_geo(dev, "Fix camera S_SELECTION for %dx%d@%d:%d to %dx%d@%d:%d\n",
		cam_rect->width, cam_rect->height,
		cam_rect->left, cam_rect->top,
		rect->width, rect->height, rect->left, rect->top);

	/* We need sensor maximum rectangle */
	ret = v4l2_subdev_call(sd, pad, get_selection, NULL, &bounds);
	if (ret < 0)
		return ret;

	/* Put user requested rectangle within sensor bounds */
	soc_camera_limit_side(&rect->left, &rect->width, sdsel.r.left, 2,
			      bounds.r.width);
	soc_camera_limit_side(&rect->top, &rect->height, sdsel.r.top, 4,
			      bounds.r.height);

	/*
	 * Popular special case - some cameras can only handle fixed sizes like
	 * QVGA, VGA, ... Take care to avoid an infinite loop.
	 */
	width = max_t(unsigned int, cam_rect->width, 2);
	height = max_t(unsigned int, cam_rect->height, 2);

	/*
	 * Loop as long as sensor is not covering the requested rectangle and
	 * is still within its bounds
	 */
	while (!ret && (is_smaller(cam_rect, rect) ||
			is_inside(cam_rect, rect)) &&
	       (bounds.r.width > width || bounds.r.height > height)) {

		width *= 2;
		height *= 2;

		cam_rect->width = width;
		cam_rect->height = height;

		/*
		 * We do not know what capabilities the camera has to set up
		 * left and top borders. We could try to be smarter in iterating
		 * them, e.g., if camera current left is to the right of the
		 * target left, set it to the middle point between the current
		 * left and minimum left. But that would add too much
		 * complexity: we would have to iterate each border separately.
		 * Instead we just drop to the left and top bounds.
		 */
		if (cam_rect->left > rect->left)
			cam_rect->left = bounds.r.left;

		if (cam_rect->left + cam_rect->width < rect->left + rect->width)
			cam_rect->width = rect->left + rect->width -
				cam_rect->left;

		if (cam_rect->top > rect->top)
			cam_rect->top = bounds.r.top;

		if (cam_rect->top + cam_rect->height < rect->top + rect->height)
			cam_rect->height = rect->top + rect->height -
				cam_rect->top;

		sdsel.r = *cam_rect;
		v4l2_subdev_call(sd, pad, set_selection, NULL, &sdsel);
		*cam_rect = sdsel.r;
		ret = soc_camera_client_g_rect(sd, cam_rect);
		dev_geo(dev, "Camera S_SELECTION %d for %dx%d@%d:%d\n", ret,
			cam_rect->width, cam_rect->height,
			cam_rect->left, cam_rect->top);
	}

	/* S_SELECTION must not modify the rectangle */
	if (is_smaller(cam_rect, rect) || is_inside(cam_rect, rect)) {
		/*
		 * The camera failed to configure suitable cropping; we cannot
		 * use the current rectangle, so set it to the maximum.
		 */
		sdsel.r = bounds.r;
		v4l2_subdev_call(sd, pad, set_selection, NULL, &sdsel);
		*cam_rect = sdsel.r;

		ret = soc_camera_client_g_rect(sd, cam_rect);
		dev_geo(dev, "Camera S_SELECTION %d for max %dx%d@%d:%d\n", ret,
			cam_rect->width, cam_rect->height,
			cam_rect->left, cam_rect->top);
	}

	if (!ret) {
		*target_rect = *cam_rect;
		move_and_crop_subrect(target_rect, subrect);
	}

	return ret;
}
EXPORT_SYMBOL(soc_camera_client_s_selection);
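To make step 2 of the comment above concrete, here is a standalone sketch of the doubling iteration on plain integers. The rectangle sizes and the covers() helper are hypothetical stand-ins for the driver's is_smaller()/is_inside() checks:

#include <stdio.h>

struct rect { unsigned int width, height; };

static int covers(const struct rect *cam, const struct rect *want)
{
	return cam->width >= want->width && cam->height >= want->height;
}

int main(void)
{
	struct rect want = { 700, 500 };		/* what the user asked for */
	struct rect cam = { 160, 120 };			/* what the client currently produces */
	const struct rect bounds = { 1280, 960 };	/* client maximum */

	/* Step 2: keep doubling the client window until it is big enough. */
	while (!covers(&cam, &want) &&
	       (cam.width < bounds.width || cam.height < bounds.height)) {
		cam.width = cam.width * 2 > bounds.width ? bounds.width : cam.width * 2;
		cam.height = cam.height * 2 > bounds.height ? bounds.height : cam.height * 2;
		printf("trying %ux%u\n", cam.width, cam.height);
	}

	/* Step 3: if doubling was not enough, fall back to the maximum window. */
	if (!covers(&cam, &want))
		cam = bounds;

	printf("final %ux%u\n", cam.width, cam.height);
	return 0;
}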

/* Iterative set_fmt, also updates cached client crop on success */
static int client_set_fmt(struct soc_camera_device *icd,
			struct v4l2_rect *rect, struct v4l2_rect *subrect,
			unsigned int max_width, unsigned int max_height,
			struct v4l2_subdev_format *format, bool host_can_scale)
{
	struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
	struct device *dev = icd->parent;
	struct v4l2_mbus_framefmt *mf = &format->format;
	unsigned int width = mf->width, height = mf->height, tmp_w, tmp_h;
	struct v4l2_subdev_selection sdsel = {
		.which = V4L2_SUBDEV_FORMAT_ACTIVE,
		.target = V4L2_SEL_TGT_CROP_BOUNDS,
	};
	bool host_1to1;
	int ret;

	ret = v4l2_device_call_until_err(sd->v4l2_dev,
					 soc_camera_grp_id(icd), pad,
					 set_fmt, NULL, format);
	if (ret < 0)
		return ret;

	dev_geo(dev, "camera scaled to %ux%u\n", mf->width, mf->height);

	if (width == mf->width && height == mf->height) {
		/* Perfect! The client has done it all. */
		host_1to1 = true;
		goto update_cache;
	}

	host_1to1 = false;
	if (!host_can_scale)
		goto update_cache;

	ret = v4l2_subdev_call(sd, pad, get_selection, NULL, &sdsel);
	if (ret < 0)
		return ret;

	if (max_width > sdsel.r.width)
		max_width = sdsel.r.width;
	if (max_height > sdsel.r.height)
		max_height = sdsel.r.height;

	/* The camera set a format, but the geometry is not precise; try to improve it */
	tmp_w = mf->width;
	tmp_h = mf->height;

	/* width <= max_width && height <= max_height - guaranteed by try_fmt */
	while ((width > tmp_w || height > tmp_h) &&
	       tmp_w < max_width && tmp_h < max_height) {
		tmp_w = min(2 * tmp_w, max_width);
		tmp_h = min(2 * tmp_h, max_height);
		mf->width = tmp_w;
		mf->height = tmp_h;
		ret = v4l2_device_call_until_err(sd->v4l2_dev,
					soc_camera_grp_id(icd), pad,
					set_fmt, NULL, format);
		dev_geo(dev, "Camera scaled to %ux%u\n",
			mf->width, mf->height);
		if (ret < 0) {
			/* This shouldn't happen */
			dev_err(dev, "Client failed to set format: %d\n", ret);
			return ret;
		}
	}

update_cache:
	/* Update cache */
	ret = soc_camera_client_g_rect(sd, rect);
	if (ret < 0)
		return ret;

	if (host_1to1)
		*subrect = *rect;
	else
		move_and_crop_subrect(rect, subrect);

	return 0;
}

/**
 * soc_camera_client_scale
 * @icd:		soc-camera device
 * @rect:		camera cropping window
 * @subrect:		part of rect, sent to the user
 * @mf:			in- / output camera output window
 * @width:		on input: max host input width;
 *			on output: user width, mapped back to input
 * @height:		on input: max host input height;
 *			on output: user height, mapped back to input
 * @host_can_scale:	host can scale this pixel format
 * @shift:		shift, used for scaling
 */
int soc_camera_client_scale(struct soc_camera_device *icd,
			struct v4l2_rect *rect, struct v4l2_rect *subrect,
			struct v4l2_mbus_framefmt *mf,
			unsigned int *width, unsigned int *height,
			bool host_can_scale, unsigned int shift)
{
	struct device *dev = icd->parent;
	struct v4l2_subdev_format fmt_tmp = {
		.which = V4L2_SUBDEV_FORMAT_ACTIVE,
		.format = *mf,
	};
	struct v4l2_mbus_framefmt *mf_tmp = &fmt_tmp.format;
	unsigned int scale_h, scale_v;
	int ret;

	/*
	 * 5. Apply iterative camera S_FMT for camera user window (also updates
	 *    client crop cache and the imaginary sub-rectangle).
	 */
	ret = client_set_fmt(icd, rect, subrect, *width, *height,
			   &fmt_tmp, host_can_scale);
	if (ret < 0)
		return ret;

	dev_geo(dev, "5: camera scaled to %ux%u\n",
		mf_tmp->width, mf_tmp->height);

	/* 6. Retrieve camera output window (g_fmt) */

	/* unneeded - it is already in "mf_tmp" */

	/* 7. Calculate new client scales. */
	scale_h = soc_camera_calc_scale(rect->width, shift, mf_tmp->width);
	scale_v = soc_camera_calc_scale(rect->height, shift, mf_tmp->height);

	mf->width	= mf_tmp->width;
	mf->height	= mf_tmp->height;
	mf->colorspace	= mf_tmp->colorspace;

	/*
	 * 8. Calculate new host crop - apply camera scales to previously
	 *    updated "effective" crop.
	 */
	*width = soc_camera_shift_scale(subrect->width, shift, scale_h);
	*height = soc_camera_shift_scale(subrect->height, shift, scale_v);

	dev_geo(dev, "8: new client sub-window %ux%u\n", *width, *height);

	return 0;
}
EXPORT_SYMBOL(soc_camera_client_scale);

/*
 * Calculate real client output window by applying new scales to the current
 * client crop. New scales are calculated from the requested output format and
 * host crop, mapped back onto the client input (subrect).
 */
void soc_camera_calc_client_output(struct soc_camera_device *icd,
		struct v4l2_rect *rect, struct v4l2_rect *subrect,
		const struct v4l2_pix_format *pix, struct v4l2_mbus_framefmt *mf,
		unsigned int shift)
{
	struct device *dev = icd->parent;
	unsigned int scale_v, scale_h;

	if (subrect->width == rect->width &&
	    subrect->height == rect->height) {
		/* No sub-cropping */
		mf->width	= pix->width;
		mf->height	= pix->height;
		return;
	}

	/* 1.-2. Current camera scales and subwin - cached. */

	dev_geo(dev, "2: subwin %ux%u@%u:%u\n",
		subrect->width, subrect->height,
		subrect->left, subrect->top);

	/*
	 * 3. Calculate new combined scales from input sub-window to requested
	 *    user window.
	 */

	/*
	 * TODO: CEU cannot scale images larger than VGA to smaller than SubQCIF
	 * (128x96) or larger than VGA. This and similar limitations have to be
	 * taken into account here.
	 */
	scale_h = soc_camera_calc_scale(subrect->width, shift, pix->width);
	scale_v = soc_camera_calc_scale(subrect->height, shift, pix->height);

	dev_geo(dev, "3: scales %u:%u\n", scale_h, scale_v);

	/*
	 * 4. Calculate desired client output window by applying combined scales
	 *    to client (real) input window.
	 */
	mf->width = soc_camera_shift_scale(rect->width, shift, scale_h);
	mf->height = soc_camera_shift_scale(rect->height, shift, scale_v);
}
EXPORT_SYMBOL(soc_camera_calc_client_output);

MODULE_DESCRIPTION("soc-camera scaling-cropping functions");
MODULE_AUTHOR("Guennadi Liakhovetski <*****@*****.**>");
MODULE_LICENSE("GPL");
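The scale/shift arithmetic in steps 3, 4, 7 and 8 above boils down to fixed-point ratios. Below is a minimal standalone sketch with hypothetical helpers; the real soc_camera_calc_scale()/soc_camera_shift_scale() may clamp and round differently:

#include <stdio.h>

/* Hypothetical stand-ins for soc_camera_calc_scale()/soc_camera_shift_scale(). */
static unsigned int calc_scale(unsigned int in, unsigned int shift, unsigned int out)
{
	return out ? ((in << shift) + out / 2) / out : 1u << shift;	/* in/out as a Q'shift' factor */
}

static unsigned int shift_scale(unsigned int size, unsigned int shift, unsigned int scale)
{
	return ((size << shift) + scale / 2) / scale;	/* divide size by the Q'shift' factor */
}

int main(void)
{
	const unsigned int shift = 12;				/* Q12 fixed point */
	unsigned int subrect_w = 1280, pix_w = 640, rect_w = 1600;

	/* Step 3: combined scale from the input sub-window to the requested user window. */
	unsigned int scale_h = calc_scale(subrect_w, shift, pix_w);	/* 2.0 -> 8192 */

	/* Step 4: apply that scale to the full client crop to get the client output width. */
	printf("client output width = %u\n", shift_scale(rect_w, shift, scale_h));	/* 800 */
	return 0;
}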
Example #8
/* Iterative s_mbus_fmt, also updates cached client crop on success */
static int client_s_fmt(struct soc_camera_device *icd,
			struct v4l2_rect *rect, struct v4l2_rect *subrect,
			unsigned int max_width, unsigned int max_height,
			struct v4l2_mbus_framefmt *mf, bool host_can_scale)
{
	struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
	struct device *dev = icd->parent;
	unsigned int width = mf->width, height = mf->height, tmp_w, tmp_h;
	struct v4l2_cropcap cap;
	bool host_1to1;
	int ret;

	ret = v4l2_device_call_until_err(sd->v4l2_dev,
					 soc_camera_grp_id(icd), video,
					 s_mbus_fmt, mf);
	if (ret < 0)
		return ret;

	dev_geo(dev, "camera scaled to %ux%u\n", mf->width, mf->height);

	if (width == mf->width && height == mf->height) {
		/* Perfect! The client has done it all. */
		host_1to1 = true;
		goto update_cache;
	}

	host_1to1 = false;
	if (!host_can_scale)
		goto update_cache;

	cap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

	ret = v4l2_subdev_call(sd, video, cropcap, &cap);
	if (ret < 0)
		return ret;

	if (max_width > cap.bounds.width)
		max_width = cap.bounds.width;
	if (max_height > cap.bounds.height)
		max_height = cap.bounds.height;

	/* The camera set a format, but the geometry is not precise; try to improve it */
	tmp_w = mf->width;
	tmp_h = mf->height;

	/* width <= max_width && height <= max_height - guaranteed by try_fmt */
	while ((width > tmp_w || height > tmp_h) &&
	       tmp_w < max_width && tmp_h < max_height) {
		tmp_w = min(2 * tmp_w, max_width);
		tmp_h = min(2 * tmp_h, max_height);
		mf->width = tmp_w;
		mf->height = tmp_h;
		ret = v4l2_device_call_until_err(sd->v4l2_dev,
					soc_camera_grp_id(icd), video,
					s_mbus_fmt, mf);
		dev_geo(dev, "Camera scaled to %ux%u\n",
			mf->width, mf->height);
		if (ret < 0) {
			/* This shouldn't happen */
			dev_err(dev, "Client failed to set format: %d\n", ret);
			return ret;
		}
	}

update_cache:
	/* Update cache */
	ret = soc_camera_client_g_rect(sd, rect);
	if (ret < 0)
		return ret;

	if (host_1to1)
		*subrect = *rect;
	else
		update_subrect(rect, subrect);

	return 0;
}