Example #1
static int fimc_cap_try_fmt_mplane(struct file *file, void *fh,
				   struct v4l2_format *f)
{
	struct v4l2_pix_format_mplane *pix = &f->fmt.pix_mp;
	struct fimc_dev *fimc = video_drvdata(file);
	struct fimc_ctx *ctx = fimc->vid_cap.ctx;
	struct v4l2_mbus_framefmt mf;
	struct fimc_fmt *ffmt = NULL;

	if (f->type != V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
		return -EINVAL;

	if (pix->pixelformat == V4L2_PIX_FMT_JPEG) {
		fimc_capture_try_format(ctx, &pix->width, &pix->height,
					NULL, &pix->pixelformat,
					FIMC_SD_PAD_SINK);
		ctx->s_frame.f_width  = pix->width;
		ctx->s_frame.f_height = pix->height;
	}
	ffmt = fimc_capture_try_format(ctx, &pix->width, &pix->height,
				       NULL, &pix->pixelformat,
				       FIMC_SD_PAD_SOURCE);
	if (!ffmt)
		return -EINVAL;

	if (!fimc->vid_cap.user_subdev_api) {
		mf.width  = pix->width;
		mf.height = pix->height;
		mf.code   = ffmt->mbus_code;
		fimc_md_graph_lock(fimc);
		fimc_pipeline_try_format(ctx, &mf, &ffmt, false);
		fimc_md_graph_unlock(fimc);

		pix->width	 = mf.width;
		pix->height	 = mf.height;
		if (ffmt)
			pix->pixelformat = ffmt->fourcc;
	}

	fimc_adjust_mplane_format(ffmt, pix->width, pix->height, pix);
	return 0;
}
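A note on the common pattern: every callback in this collection fetches its per-device context with video_drvdata(file). The other half of that pairing happens at registration time; here is a minimal sketch, assuming a hypothetical demo_dev driver (all demo_* names are placeholders, not taken from any example):

#include <media/v4l2-dev.h>

/* Hypothetical driver context; the demo_* names do not exist in the examples. */
struct demo_dev {
	struct video_device vdev;
	/* ... driver state ... */
};

static int demo_register(struct demo_dev *dev)
{
	/*
	 * Store the context that video_drvdata(file) will return later in
	 * the open/ioctl/poll handlers.
	 */
	video_set_drvdata(&dev->vdev, dev);
	return video_register_device(&dev->vdev, VFL_TYPE_GRABBER, -1);
}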
Example #2
static int
iss_video_streamoff(struct file *file, void *fh, enum v4l2_buf_type type)
{
	struct iss_video_fh *vfh = to_iss_video_fh(fh);
	struct iss_video *video = video_drvdata(file);
	struct iss_pipeline *pipe = to_iss_pipeline(&video->video.entity);
	enum iss_pipeline_state state;
	unsigned long flags;

	if (type != video->type)
		return -EINVAL;

	mutex_lock(&video->stream_lock);

	if (!vb2_is_streaming(&vfh->queue))
		goto done;

	/* Update the pipeline state. */
	if (video->type == V4L2_BUF_TYPE_VIDEO_CAPTURE)
		state = ISS_PIPELINE_STREAM_OUTPUT
		      | ISS_PIPELINE_QUEUE_OUTPUT;
	else
		state = ISS_PIPELINE_STREAM_INPUT
		      | ISS_PIPELINE_QUEUE_INPUT;

	spin_lock_irqsave(&pipe->lock, flags);
	pipe->state &= ~state;
	spin_unlock_irqrestore(&pipe->lock, flags);

	/* Stop the stream. */
	omap4iss_pipeline_set_stream(pipe, ISS_PIPELINE_STREAM_STOPPED);
	vb2_streamoff(&vfh->queue, type);
	video->queue = NULL;

	if (video->iss->pdata->set_constraints)
		video->iss->pdata->set_constraints(video->iss, false);
	media_entity_pipeline_stop(&video->video.entity);

done:
	mutex_unlock(&video->stream_lock);
	return 0;
}
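Example #3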
static int tea5764_open(struct file *file)
{
	/* Currently we support only one device */
	int minor = video_devdata(file)->minor;
	struct tea5764_device *radio = video_drvdata(file);

	if (radio->videodev->minor != minor)
		return -ENODEV;

	mutex_lock(&radio->mutex);
	/* Only exclusive access */
	if (radio->users) {
		mutex_unlock(&radio->mutex);
		return -EBUSY;
	}
	radio->users++;
	mutex_unlock(&radio->mutex);
	file->private_data = radio;
	return 0;
}
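Example #4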
static int mxr_s_output(struct file *file, void *fh, unsigned int i)
{
	struct video_device *vfd = video_devdata(file);
	struct mxr_layer *layer = video_drvdata(file);
	struct mxr_device *mdev = layer->mdev;
	int ret = 0;

	if (i >= mdev->output_cnt || mdev->output[i] == NULL)
		return -EINVAL;

	mutex_lock(&mdev->mutex);
	mdev->current_output = i;
	vfd->tvnorms = 0;
	v4l2_subdev_call(to_outsd(mdev), video, g_tvnorms_output,
		&vfd->tvnorms);
	mutex_unlock(&mdev->mutex);
	mxr_dbg(mdev, "tvnorms = %08llx\n", vfd->tvnorms);

	return ret;
}
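Example #5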
static int mxr_cropcap(struct file *file, void *fh, struct v4l2_cropcap *a)
{
	struct mxr_layer *layer = video_drvdata(file);
	struct mxr_crop *crop;

	mxr_dbg(layer->mdev, "%s:%d\n", __func__, __LINE__);
	crop = choose_crop_by_type(&layer->geo, a->type);
	if (crop == NULL)
		return -EINVAL;
	mxr_layer_geo_fix(layer);
	a->bounds.left = 0;
	a->bounds.top = 0;
	a->bounds.width = crop->full_width;
	a->bounds.height = crop->full_height;
	a->defrect = a->bounds;
	/* setting pixel aspect to 1/1 */
	a->pixelaspect.numerator = 1;
	a->pixelaspect.denominator = 1;
	return 0;
}
Example #6
static int dt3155_s_std(struct file *filp, void *p, v4l2_std_id norm)
{
	struct dt3155_priv *pd = video_drvdata(filp);

	if (pd->std == norm)
		return 0;
	if (vb2_is_busy(&pd->vidq))
		return -EBUSY;
	pd->std = norm;
	if (pd->std & V4L2_STD_525_60) {
		pd->csr2 = VT_60HZ;
		pd->width = 640;
		pd->height = 480;
	} else {
		pd->csr2 = VT_50HZ;
		pd->width = 768;
		pd->height = 576;
	}
	return 0;
}
Example #7
/*
 * si470x_fops_poll - poll RDS data
 */
static unsigned int si470x_fops_poll(struct file *file,
		struct poll_table_struct *pts)
{
	struct si470x_device *radio = video_drvdata(file);
	int retval = 0;

	/* switch on rds reception */

	mutex_lock(&radio->lock);
	if ((radio->registers[SYSCONFIG1] & SYSCONFIG1_RDS) == 0)
		si470x_rds_on(radio);
	mutex_unlock(&radio->lock);

	poll_wait(file, &radio->read_queue, pts);

	if (radio->rd_index != radio->wr_index)
		retval = POLLIN | POLLRDNORM;

	return retval;
}
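Example #8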
static unsigned int
hwcam_dev_vo_poll(
	struct file* filep,
	struct poll_table_struct* wait)
{
	unsigned int rc = 0;
	hwcam_dev_t* cam = video_drvdata(filep);
	hwcam_user_t* user = VO2USER(filep->private_data);
	BUG_ON(!cam || !user);

	if (user->f_format_valid) {
		rc = vb2_poll(&user->vb2q, filep, wait);
	}
	poll_wait(filep, &user->eq.wait, wait);
	if (v4l2_event_pending(&user->eq)) {
		rc |= POLLPRI | POLLOUT | POLLIN;
	}

	return rc;
}
Example #9
/*
 * si470x_fops_poll - poll RDS data
 */
static unsigned int si470x_fops_poll(struct file *file,
		struct poll_table_struct *pts)
{
	struct si470x_device *radio = video_drvdata(file);
	int retval = 0;

	/* switch on rds reception */
	if ((radio->registers[SYSCONFIG1] & SYSCONFIG1_RDS) == 0) {
		si470x_rds_on(radio);
		schedule_delayed_work(&radio->work,
			msecs_to_jiffies(rds_poll_time));
	}

	poll_wait(file, &radio->read_queue, pts);

	if (radio->rd_index != radio->wr_index)
		retval = POLLIN | POLLRDNORM;

	return retval;
}
Example #10
static int usbtv_enum_input(struct file *file, void *priv,
					struct v4l2_input *i)
{
	struct usbtv *dev = video_drvdata(file);

	switch (i->index) {
	case USBTV_COMPOSITE_INPUT:
		strlcpy(i->name, "Composite", sizeof(i->name));
		break;
	case USBTV_SVIDEO_INPUT:
		strlcpy(i->name, "S-Video", sizeof(i->name));
		break;
	default:
		return -EINVAL;
	}

	i->type = V4L2_INPUT_TYPE_CAMERA;
	i->std = dev->vdev.tvnorms;
	return 0;
}
Example #11
static int fimc_lite_g_fmt_mplane(struct file *file, void *fh,
				  struct v4l2_format *f)
{
	struct fimc_lite *fimc = video_drvdata(file);
	struct v4l2_pix_format_mplane *pixm = &f->fmt.pix_mp;
	struct v4l2_plane_pix_format *plane_fmt = &pixm->plane_fmt[0];
	struct flite_frame *frame = &fimc->out_frame;
	const struct fimc_fmt *fmt = fimc->fmt;

	plane_fmt->bytesperline = (frame->f_width * fmt->depth[0]) / 8;
	plane_fmt->sizeimage = plane_fmt->bytesperline * frame->f_height;

	pixm->num_planes = fmt->memplanes;
	pixm->pixelformat = fmt->fourcc;
	pixm->width = frame->f_width;
	pixm->height = frame->f_height;
	pixm->field = V4L2_FIELD_NONE;
	pixm->colorspace = V4L2_COLORSPACE_JPEG;
	return 0;
}
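Example #12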
static int
hwcam_dev_vo_s_fmt_vid_cap_mplane(
	struct file* filep,
	void* fh,
	struct v4l2_format* pfmt)
{
	int rc = -EINVAL;
	hwcam_dev_t* cam = video_drvdata(filep);
	hwcam_user_t* user = VO2USER(fh);
	BUG_ON(!cam || !user);
	if (pfmt->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE
	    && user->stream) {
		rc = hwcam_cfgstream_intf_try_fmt(user->stream, pfmt);
		if (rc == 0) {
			user->format = *pfmt;
			user->f_format_valid = 1;
		}
	}
	return rc;
}
Example #13
void wfd_vidbuf_buf_cleanup(struct vb2_buffer *vb)
{
	int rc = 0;
	struct vb2_queue *q = vb->vb2_queue;
	struct file *priv_data = (struct file *)(q->drv_priv);
	struct wfd_device *wfd_dev =
		(struct wfd_device *)video_drvdata(priv_data);
	struct wfd_inst *inst = (struct wfd_inst *)priv_data->private_data;
	struct mdp_buf_info buf = {
					inst->mdp_inst,
					vb,
					inst->minfo[vb->v4l2_buf.index]->fd,
					inst->minfo[vb->v4l2_buf.index]->offset
					};
	WFD_MSG_DBG("Releasing buffer\n");
	rc = v4l2_subdev_call(&wfd_dev->mdp_sdev, core, ioctl,
			 MDP_RELEASE_BUF, (void *)&buf);
	if (rc)
		WFD_MSG_ERR("Failed to release the buffer\n");
}
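Example #14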
/*
 * si470x_fops_poll - poll RDS data
 */
static unsigned int si470x_fops_poll(struct file *file,
		struct poll_table_struct *pts)
{
	struct si470x_device *radio = video_drvdata(file);
	unsigned long req_events = poll_requested_events(pts);
	int retval = v4l2_ctrl_poll(file, pts);

	if (req_events & (POLLIN | POLLRDNORM)) {
		/* switch on rds reception */
		if ((radio->registers[SYSCONFIG1] & SYSCONFIG1_RDS) == 0)
			si470x_rds_on(radio);

		poll_wait(file, &radio->read_queue, pts);

		if (radio->rd_index != radio->wr_index)
			retval |= POLLIN | POLLRDNORM;
	}

	return retval;
}
Example #15
static int cpia2_cropcap(struct file *file, void *fh, struct v4l2_cropcap *c)
{
	struct camera_data *cam = video_drvdata(file);

	if (c->type != V4L2_BUF_TYPE_VIDEO_CAPTURE)
	       return -EINVAL;

	c->bounds.left = 0;
	c->bounds.top = 0;
	c->bounds.width = cam->width;
	c->bounds.height = cam->height;
	c->defrect.left = 0;
	c->defrect.top = 0;
	c->defrect.width = cam->width;
	c->defrect.height = cam->height;
	c->pixelaspect.numerator = 1;
	c->pixelaspect.denominator = 1;

	return 0;
}
Example #16
/* Request buffers */
static int vidioc_reqbufs(struct file *file, void *priv,
					  struct v4l2_requestbuffers *reqbufs)
{
	struct s5p_mfc_dev *dev = video_drvdata(file);
	struct s5p_mfc_ctx *ctx = fh_to_ctx(priv);

	if (reqbufs->memory != V4L2_MEMORY_MMAP) {
		mfc_err("Only V4L2_MEMORY_MMAP is supported\n");
		return -EINVAL;
	}

	if (reqbufs->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) {
		return reqbufs_output(dev, ctx, reqbufs);
	} else if (reqbufs->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
		return reqbufs_capture(dev, ctx, reqbufs);
	} else {
		mfc_err("Invalid type requested\n");
		return -EINVAL;
	}
}
Example #17
static int gsc_capture_enum_input(struct file *file, void *priv,
			       struct v4l2_input *i)
{
	struct gsc_dev *gsc = video_drvdata(file);
	struct exynos_platform_gscaler *pdata = gsc->pdata;
	struct exynos_isp_info *isp_info;

	if (i->index >= MAX_CAMIF_CLIENTS)
		return -EINVAL;

	isp_info = pdata->isp_info[i->index];
	if (isp_info == NULL)
		return -EINVAL;

	i->type = V4L2_INPUT_TYPE_CAMERA;

	strncpy(i->name, isp_info->board_info->type, 32);

	return 0;
}
Example #18
static int cpia2_s_fmt_vid_cap(struct file *file, void *_fh,
					struct v4l2_format *f)
{
	struct camera_data *cam = video_drvdata(file);
	int err, frame;

	err = cpia2_try_fmt_vid_cap(file, _fh, f);
	if(err != 0)
		return err;

	cam->pixelformat = f->fmt.pix.pixelformat;

	/* NOTE: This should be set to 1 for MJPEG, but some apps don't handle
	 * the missing Huffman table properly. */
	cam->params.compression.inhibit_htables = 0;
		/*f->fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG;*/

	/* we set the video window to something smaller or equal to what
	 * is requested by the user???
	 */
	DBG("Requested width = %d, height = %d\n",
	    f->fmt.pix.width, f->fmt.pix.height);
	if (f->fmt.pix.width != cam->width ||
	    f->fmt.pix.height != cam->height) {
		cam->width = f->fmt.pix.width;
		cam->height = f->fmt.pix.height;
		cam->params.roi.width = f->fmt.pix.width;
		cam->params.roi.height = f->fmt.pix.height;
		cpia2_set_format(cam);
	}

	for (frame = 0; frame < cam->num_frames; ++frame) {
		if (cam->buffers[frame].status == FRAME_READING)
			if ((err = sync(cam, frame)) < 0)
				return err;

		cam->buffers[frame].status = FRAME_EMPTY;
	}

	return 0;
}
Example #19
static int cpia2_querybuf(struct file *file, void *fh, struct v4l2_buffer *buf)
{
	struct camera_data *cam = video_drvdata(file);

	if(buf->type != V4L2_BUF_TYPE_VIDEO_CAPTURE ||
	   buf->index >= cam->num_frames)
		return -EINVAL;

	buf->m.offset = cam->buffers[buf->index].data - cam->frame_buffer;
	buf->length = cam->frame_size;

	buf->memory = V4L2_MEMORY_MMAP;

	if(cam->mmapped)
		buf->flags = V4L2_BUF_FLAG_MAPPED;
	else
		buf->flags = 0;

	buf->flags |= V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC;

	switch (cam->buffers[buf->index].status) {
	case FRAME_EMPTY:
	case FRAME_ERROR:
	case FRAME_READING:
		buf->bytesused = 0;
		buf->flags |= V4L2_BUF_FLAG_QUEUED;
		break;
	case FRAME_READY:
		buf->bytesused = cam->buffers[buf->index].length;
		buf->timestamp = ns_to_timeval(cam->buffers[buf->index].ts);
		buf->sequence = cam->buffers[buf->index].seq;
		buf->flags |= V4L2_BUF_FLAG_DONE;
		break;
	}

	DBG("QUERYBUF index:%d offset:%d flags:%d seq:%d bytesused:%d\n",
	     buf->index, buf->m.offset, buf->flags, buf->sequence,
	     buf->bytesused);

	return 0;
}
Example #20
static int cpia2_s_jpegcomp(struct file *file, void *fh, struct v4l2_jpegcompression *parms)
{
	struct camera_data *cam = video_drvdata(file);

	DBG("S_JPEGCOMP APP_len:%d COM_len:%d\n",
	    parms->APP_len, parms->COM_len);

	cam->params.compression.inhibit_htables =
		!(parms->jpeg_markers & V4L2_JPEG_MARKER_DHT);
	parms->jpeg_markers &= V4L2_JPEG_MARKER_DQT | V4L2_JPEG_MARKER_DRI |
			       V4L2_JPEG_MARKER_DHT;

	if(parms->APP_len != 0) {
		if(parms->APP_len > 0 &&
		   parms->APP_len <= sizeof(cam->APP_data) &&
		   parms->APPn >= 0 && parms->APPn <= 15) {
			cam->APPn = parms->APPn;
			cam->APP_len = parms->APP_len;
			memcpy(cam->APP_data, parms->APP_data, parms->APP_len);
		} else {
			LOG("Bad APPn Params n=%d len=%d\n",
			    parms->APPn, parms->APP_len);
			return -EINVAL;
		}
	} else {
		cam->APP_len = 0;
	}

	if(parms->COM_len != 0) {
		if(parms->COM_len > 0 &&
		   parms->COM_len <= sizeof(cam->COM_data)) {
			cam->COM_len = parms->COM_len;
			memcpy(cam->COM_data, parms->COM_data, parms->COM_len);
		} else {
			LOG("Bad COM_len=%d\n", parms->COM_len);
			return -EINVAL;
		}
	}

	return 0;
}
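Example #21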
int si470x_fops_release(struct file *file)
{
	struct si470x_device *radio = video_drvdata(file);
	int retval = 0;

	/* safety check */
	if (!radio) {
		retval = -ENODEV;
		goto done;
	}

	mutex_lock(&radio->lock);
	radio->users--;
	if (radio->users == 0) {
		/* shut down the interrupt-in handler */
		if (radio->int_in_running) {
			radio->int_in_running = 0;
			if (radio->int_in_urb)
				usb_kill_urb(radio->int_in_urb);
		}

		if (radio->disconnected) {
			video_unregister_device(radio->videodev);
			kfree(radio->int_in_buffer);
			kfree(radio->buffer);
			mutex_unlock(&radio->lock);
			kfree(radio);
			goto done;
		}

		/* wake up any readers blocked on the RDS queue */
		wake_up_interruptible(&radio->read_queue);

		/* stop the radio and drop the autopm reference */
		retval = si470x_stop(radio);
		usb_autopm_put_interface(radio->intf);
	}
	mutex_unlock(&radio->lock);
done:
	return retval;
}
Example #22
static int gsc_capture_open(struct file *file)
{
	struct gsc_dev *gsc = video_drvdata(file);
	int ret = v4l2_fh_open(file);

	if (ret)
		return ret;

	if (gsc_m2m_opened(gsc) || gsc_out_opened(gsc) || gsc_cap_opened(gsc)) {
		v4l2_fh_release(file);
		return -EBUSY;
	}

	set_bit(ST_CAPT_OPEN, &gsc->state);

	pm_runtime_get_sync(&gsc->pdev->dev);

	if (++gsc->cap.refcnt == 1) {
		ret = gsc_cap_pipeline_initialize(gsc, &gsc->cap.vfd->entity, true);
		if (ret < 0) {
			gsc_err("gsc pipeline initialization failed\n");
			goto err;
		}

		ret = gsc_capture_ctrls_create(gsc);
		if (ret) {
			gsc_err("failed to create controls\n");
			goto err;
		}
	}

	gsc_dbg("pid: %d, state: 0x%lx", task_pid_nr(current), gsc->state);

	return 0;

err:
	pm_runtime_put_sync(&gsc->pdev->dev);
	v4l2_fh_release(file);
	clear_bit(ST_CAPT_OPEN, &gsc->state);
	return ret;
}
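Example #23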
static int camera_v4l2_fh_open(struct file *filep)
{
	struct msm_video_device *pvdev = video_drvdata(filep);
	struct camera_v4l2_private *sp;

	sp = kzalloc(sizeof(*sp), GFP_KERNEL);

	if (!sp) {
		pr_err("%s : memory not available\n", __func__);
		return -ENOMEM;
	}
	filep->private_data = &sp->fh;

	/* stream_id = open id */
	sp->stream_id = atomic_read(&pvdev->stream_cnt);

	v4l2_fh_init(&sp->fh, pvdev->vdev);
	v4l2_fh_add(&sp->fh);

	return 0;
}
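Example #24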
static int vidioc_g_tuner(struct file *file, void *priv,
					struct v4l2_tuner *v)
{
	struct zoltrix *zol = video_drvdata(file);

	if (v->index > 0)
		return -EINVAL;

	strlcpy(v->name, "FM", sizeof(v->name));
	v->type = V4L2_TUNER_RADIO;
	v->rangelow = 88 * 16000;
	v->rangehigh = 108 * 16000;
	v->rxsubchans = V4L2_TUNER_SUB_MONO | V4L2_TUNER_SUB_STEREO;
	v->capability = V4L2_TUNER_CAP_LOW;
	if (zol_is_stereo(zol))
		v->audmode = V4L2_TUNER_MODE_STEREO;
	else
		v->audmode = V4L2_TUNER_MODE_MONO;
	v->signal = 0xFFFF * zol_getsigstr(zol);
	return 0;
}
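Example #25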
/*
 * si470x_vidioc_s_tuner - set tuner attributes
 */
static int si470x_vidioc_s_tuner(struct file *file, void *priv,
		struct v4l2_tuner *tuner)
{
	struct si470x_device *radio = video_drvdata(file);

	if (tuner->index != 0)
		return -EINVAL;

	/* mono/stereo selector */
	switch (tuner->audmode) {
	case V4L2_TUNER_MODE_MONO:
		radio->registers[POWERCFG] |= POWERCFG_MONO;  /* force mono */
		break;
	case V4L2_TUNER_MODE_STEREO:
	default:
		radio->registers[POWERCFG] &= ~POWERCFG_MONO; /* try stereo */
		break;
	}

	return si470x_set_register(radio, POWERCFG);
}
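Example #26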
/* Get modulator attributes. If mode is not TX, return no attributes. */
static int fm_v4l2_vidioc_g_modulator(struct file *file, void *priv,
		struct v4l2_modulator *mod)
{
	struct fmdev *fmdev = video_drvdata(file);

	if (mod->index != 0)
		return -EINVAL;

	if (fmdev->curr_fmmode != FM_MODE_TX)
		return -EPERM;

	mod->txsubchans = ((fmdev->tx_data.aud_mode == FM_STEREO_MODE) ?
				V4L2_TUNER_SUB_STEREO : V4L2_TUNER_SUB_MONO) |
				((fmdev->tx_data.rds.flag == FM_RDS_ENABLE) ?
				V4L2_TUNER_SUB_RDS : 0);

	mod->capability = V4L2_TUNER_CAP_STEREO | V4L2_TUNER_CAP_RDS |
				V4L2_TUNER_CAP_LOW;

	return 0;
}
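Example #27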
static int vidioc_s_ctrl(struct file *file, void *priv,
				struct v4l2_control *ctrl)
{
	struct rtrack2 *rt = video_drvdata(file);

	switch (ctrl->id) {
	case V4L2_CID_AUDIO_MUTE:
		if (ctrl->value)
			rt_mute(rt);
		else
			rt_unmute(rt);
		return 0;
	case V4L2_CID_AUDIO_VOLUME:
		if (ctrl->value)
			rt_unmute(rt);
		else
			rt_mute(rt);
		return 0;
	}
	return -EINVAL;
}
Example #28
static int fimc_capture_close(struct file *file)
{
	struct fimc_dev *fimc = video_drvdata(file);

	dbg("pid: %d, state: 0x%lx", task_pid_nr(current), fimc->state);

	if (--fimc->vid_cap.refcnt == 0) {
		clear_bit(ST_CAPT_BUSY, &fimc->state);
		fimc_stop_capture(fimc, false);
		fimc_pipeline_shutdown(fimc);
		clear_bit(ST_CAPT_SUSPENDED, &fimc->state);
	}

	pm_runtime_put(&fimc->pdev->dev);

	if (fimc->vid_cap.refcnt == 0) {
		vb2_queue_release(&fimc->vid_cap.vbq);
		fimc_ctrls_delete(fimc->vid_cap.ctx);
	}
	return v4l2_fh_release(file);
}
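None of the handlers above is invoked directly; a driver wires them into struct v4l2_file_operations and struct v4l2_ioctl_ops on its video_device. A minimal sketch of that wiring, with hypothetical demo_* handlers standing in for the concrete functions shown in these examples:

#include <linux/module.h>
#include <media/v4l2-dev.h>
#include <media/v4l2-ioctl.h>

/* The demo_* callbacks are placeholders for handlers like the ones above. */
static const struct v4l2_file_operations demo_fops = {
	.owner		= THIS_MODULE,
	.open		= demo_open,		/* cf. tea5764_open */
	.release	= demo_release,		/* cf. si470x_fops_release */
	.poll		= demo_poll,		/* cf. si470x_fops_poll */
	.unlocked_ioctl	= video_ioctl2,		/* dispatches to the ioctl ops */
};

static const struct v4l2_ioctl_ops demo_ioctl_ops = {
	.vidioc_enum_input	= demo_enum_input,	/* cf. usbtv_enum_input */
	.vidioc_s_std		= demo_s_std,		/* cf. dt3155_s_std */
	.vidioc_reqbufs		= demo_reqbufs,		/* cf. vidioc_reqbufs */
	.vidioc_querybuf	= demo_querybuf,	/* cf. cpia2_querybuf */
};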
Example #29
static int cpia2_g_selection(struct file *file, void *fh,
			     struct v4l2_selection *s)
{
	struct camera_data *cam = video_drvdata(file);

	if (s->type != V4L2_BUF_TYPE_VIDEO_CAPTURE)
		return -EINVAL;

	switch (s->target) {
	case V4L2_SEL_TGT_CROP_BOUNDS:
	case V4L2_SEL_TGT_CROP_DEFAULT:
		s->r.left = 0;
		s->r.top = 0;
		s->r.width = cam->width;
		s->r.height = cam->height;
		break;
	default:
		return -EINVAL;
	}
	return 0;
}
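Example #30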
/* Set hardware frequency seek. If current mode is NOT RX, set it RX. */
static int fm_v4l2_vidioc_s_hw_freq_seek(struct file *file, void *priv,
		struct v4l2_hw_freq_seek *seek)
{
	struct fmdev *fmdev = video_drvdata(file);
	int ret;

	if (fmdev->curr_fmmode != FM_MODE_RX) {
		ret = fmc_set_mode(fmdev, FM_MODE_RX);
		if (ret != 0) {
			fmerr("Failed to set RX mode\n");
			return ret;
		}
	}

	ret = fm_rx_seek(fmdev, seek->seek_upward, seek->wrap_around,
			seek->spacing, SEEK_START);
	if (ret < 0)
		fmerr("RX seek failed - %d\n", ret);

	return ret;
}