Code example #1
File: vsp1_entity.c  Project: 19Dan01/linux
/*
 * vsp1_entity_init_formats - Initialize formats on all pads
 * @subdev: V4L2 subdevice
 * @cfg: V4L2 subdev pad configuration
 *
 * Initialize all pad formats with default values. If cfg is not NULL, try
 * formats are initialized on the file handle. Otherwise active formats are
 * initialized on the device.
 */
void vsp1_entity_init_formats(struct v4l2_subdev *subdev,
			    struct v4l2_subdev_pad_config *cfg)
{
	struct v4l2_subdev_format format;
	unsigned int pad;

	for (pad = 0; pad < subdev->entity.num_pads - 1; ++pad) {
		memset(&format, 0, sizeof(format));

		format.pad = pad;
		format.which = cfg ? V4L2_SUBDEV_FORMAT_TRY
			     : V4L2_SUBDEV_FORMAT_ACTIVE;

		v4l2_subdev_call(subdev, pad, set_fmt, cfg, &format);
	}
}
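Usage note (not part of the snippet above): an init helper like this is typically wired into the subdev's internal .open() operation so that TRY formats get sane defaults whenever the device node is opened. A minimal sketch of that pattern follows; the function and ops names are illustrative assumptions, not taken from the file.

#include <media/v4l2-subdev.h>

/* Hedged sketch: initialize TRY formats when the subdev node is opened.
 * vsp1_entity_open / vsp1_entity_internal_ops are assumed names here. */
static int vsp1_entity_open(struct v4l2_subdev *subdev,
			    struct v4l2_subdev_fh *fh)
{
	vsp1_entity_init_formats(subdev, fh->pad);

	return 0;
}

static const struct v4l2_subdev_internal_ops vsp1_entity_internal_ops = {
	.open = vsp1_entity_open,
};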
Code example #2
File: nxp-video.c  Project: iTOP4418/kernel-3.4.39
static int nxp_video_cropcap(struct file *file, void *fh,
        struct v4l2_cropcap *a)
{
    int ret;
    u32 pad;
    struct nxp_video *me = file->private_data;
    struct v4l2_subdev *subdev = _get_remote_subdev(me, a->type, &pad);

    pr_debug("%s\n", __func__);

    ret = v4l2_subdev_call(subdev, video, cropcap, a);
    if (ret < 0) {
        pr_err("%s: failed to subdev cropcap\n", __func__);
    }
    return ret == -ENOIOCTLCMD ? -EINVAL : ret;
}
Code example #3
File: mixer_drv.c  Project: 513855417/linux
void mxr_get_mbus_fmt(struct mxr_device *mdev,
	struct v4l2_mbus_framefmt *mbus_fmt)
{
	struct v4l2_subdev *sd;
	struct v4l2_subdev_format fmt = {
		.which = V4L2_SUBDEV_FORMAT_ACTIVE,
	};
	int ret;

	mutex_lock(&mdev->mutex);
	sd = to_outsd(mdev);
	ret = v4l2_subdev_call(sd, pad, get_fmt, NULL, &fmt);
	*mbus_fmt = fmt.format;
	WARN(ret, "failed to get mbus_fmt for output %s\n", sd->name);
	mutex_unlock(&mdev->mutex);
}
Code example #4
File: rcar-v4l2.c  Project: AshishNamdev/linux
static int rvin_dv_timings_cap(struct file *file, void *priv_fh,
			       struct v4l2_dv_timings_cap *cap)
{
	struct rvin_dev *vin = video_drvdata(file);
	struct v4l2_subdev *sd = vin_to_source(vin);
	int pad, ret;

	pad = cap->pad;
	cap->pad = vin->sink_pad_idx;

	ret = v4l2_subdev_call(sd, pad, dv_timings_cap, cap);

	cap->pad = pad;

	return ret;
}
Code example #5
File: nxp-video.c  Project: iTOP4418/kernel-3.4.39
static int nxp_video_set_crop(struct file *file, void *fh,
        struct v4l2_crop *a)
{
    int ret;
    u32 pad;
    struct nxp_video *me = file->private_data;
    struct v4l2_subdev *subdev = _get_remote_subdev(me, a->type, &pad);
    struct v4l2_subdev_crop subdev_crop;

    vmsg("%s: name %s, crop pad %d\n", __func__, me->name, a->pad);

    subdev_crop.which = V4L2_SUBDEV_FORMAT_ACTIVE;
    /* TODO */
#if 0
    subdev_crop.pad = ((me->type == NXP_VIDEO_TYPE_CAPTURE) ||
        (me->type == NXP_VIDEO_TYPE_M2M)) ? 1 : 0;
#else
    if (me->type == NXP_VIDEO_TYPE_OUT) {
        if (a->pad == 0)
            subdev_crop.pad = pad;
        else
            subdev_crop.pad = a->pad;
    } else {
        /* add for clipper cropping */
        struct nxp_video_frame *frame;
        frame = &me->frame[0];
        frame->width = a->c.width;
        frame->height = a->c.height;
        /* end clipper cropping */

        if (a->pad == 0)
            subdev_crop.pad = 1;
        else
            subdev_crop.pad = a->pad;
    }
#endif
    subdev_crop.rect = a->c;

    vmsg("%s: call subdev set_crop\n", __func__);
    ret = v4l2_subdev_call(subdev, pad, set_crop, NULL, &subdev_crop);
    if (ret < 0) {
        pr_err("%s: failed to subdev set_crop, ret %d\n", __func__, ret);
        return ret;
    }

    return 0;
}
Code example #6
static int vpif_streamoff(struct file *file, void *priv,
				enum v4l2_buf_type buftype)
{

	struct vpif_fh *fh = priv;
	struct channel_obj *ch = fh->channel;
	struct common_obj *common = &ch->common[VPIF_VIDEO_INDEX];
	int ret;

	vpif_dbg(2, debug, "vpif_streamoff\n");

	if (buftype != V4L2_BUF_TYPE_VIDEO_CAPTURE) {
		vpif_dbg(1, debug, "buffer type not supported\n");
		return -EINVAL;
	}

	
	if (!fh->io_allowed[VPIF_VIDEO_INDEX]) {
		vpif_dbg(1, debug, "io not allowed\n");
		return -EACCES;
	}

	
	if (!common->started) {
		vpif_dbg(1, debug, "channel->started\n");
		return -EINVAL;
	}

	
	if (VPIF_CHANNEL0_VIDEO == ch->channel_id) {
		enable_channel0(0);
		channel0_intr_enable(0);
	} else {
		enable_channel1(0);
		channel1_intr_enable(0);
	}

	common->started = 0;

	ret = v4l2_subdev_call(vpif_obj.sd[ch->curr_sd_index], video,
				s_stream, 0);

	if (ret && (ret != -ENOIOCTLCMD))
		vpif_dbg(1, debug, "stream off failed in subdev\n");

	return videobuf_streamoff(&common->buffer_queue);
}
Code example #7
/*
 * return sensor chip identification
 */
static int atomisp_g_chip_ident(struct file *file, void *fh,
	struct v4l2_dbg_chip_ident *chip)
{
	struct video_device *vdev = video_devdata(file);
	struct atomisp_device *isp = video_get_drvdata(vdev);
	int ret = 0;

	mutex_lock(&isp->input_lock);
	ret = v4l2_subdev_call(isp->inputs[isp->input_curr].camera,
			       core, g_chip_ident, chip);
	mutex_unlock(&isp->input_lock);

	if (ret)
		v4l2_err(&atomisp_dev,
			    "failed to g_chip_ident for sensor\n");
	return ret;
}
Code example #8
File: vpbe.c  Project: jmw7912/wat-0016-kernel-2.6.37
/**
 * vpbe_set_mode - Set mode in the current encoder using mode info
 *
 * Use the mode string to decide what timings to set in the encoder
 * This is typically useful when fbset command is used to change the current
 * timings by specifying a string to indicate the timings.
 */
static int vpbe_set_mode(struct vpbe_device *vpbe_dev,
			 struct vpbe_enc_mode_info *mode_info)
{
	struct vpbe_display_config *vpbe_config = vpbe_dev->cfg;
	int out_index = vpbe_dev->current_out_index, ret = 0, i;
	struct vpbe_enc_mode_info *preset_mode = NULL;
	struct v4l2_dv_preset dv_preset;
	if ((NULL == mode_info) || (NULL == mode_info->name))
		return -EINVAL;

	for (i = 0; i < vpbe_config->outputs[out_index].num_modes; i++) {
		if (!strcmp(mode_info->name,
		     vpbe_config->outputs[out_index].modes[i].name)) {
			preset_mode = &vpbe_config->outputs[out_index].modes[i];
			/*
			 * it may be one of the 3 timings type. Check and
			 * invoke right API
			 */
			if (preset_mode->timings_type & VPBE_ENC_STD)
				return vpbe_s_std(vpbe_dev,
						 &preset_mode->timings.std_id);
			if (preset_mode->timings_type & VPBE_ENC_DV_PRESET) {
				dv_preset.preset =
					preset_mode->timings.dv_preset;
				return vpbe_s_dv_preset(vpbe_dev, &dv_preset);
			}
		}
	}

	/* Only custom timing should reach here */
	if (preset_mode == NULL)
		return -EINVAL;

	mutex_lock(&vpbe_dev->lock);
	ret = v4l2_subdev_call(vpbe_dev->venc, core, ioctl,
			       VENC_CONFIGURE, preset_mode);
	if (!ret) {
		vpbe_dev->current_timings = *preset_mode;
		osd_device->ops.set_left_margin(osd_device,
			vpbe_dev->current_timings.left_margin);
		osd_device->ops.set_top_margin(osd_device,
			vpbe_dev->current_timings.upper_margin);
	}
	mutex_unlock(&vpbe_dev->lock);
	return ret;
}
Code example #9
static int get_camera_scales(struct v4l2_subdev *sd, struct v4l2_rect *rect,
			     unsigned int *scale_h, unsigned int *scale_v)
{
	struct v4l2_format f;
	int ret;

	f.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

	ret = v4l2_subdev_call(sd, video, g_fmt, &f);
	if (ret < 0)
		return ret;

	*scale_h = calc_generic_scale(rect->width, f.fmt.pix.width);
	*scale_v = calc_generic_scale(rect->height, f.fmt.pix.height);

	return 0;
}
Code example #10
File: iss_video.c  Project: Astralix/mainline-dss11
static int
iss_video_set_crop(struct file *file, void *fh, const struct v4l2_crop *crop)
{
	struct iss_video *video = video_drvdata(file);
	struct v4l2_subdev *subdev;
	int ret;

	subdev = iss_video_remote_subdev(video, NULL);
	if (subdev == NULL)
		return -EINVAL;

	mutex_lock(&video->mutex);
	ret = v4l2_subdev_call(subdev, video, s_crop, crop);
	mutex_unlock(&video->mutex);

	return ret == -ENOIOCTLCMD ? -ENOTTY : ret;
}
Code example #11
File: iss.c  Project: vanquishsecurity/linux
/*
 * iss_pipeline_enable - Enable streaming on a pipeline
 * @pipe: ISS pipeline
 * @mode: Stream mode (single shot or continuous)
 *
 * Walk the entities chain starting at the pipeline output video node and start
 * all modules in the chain in the given mode.
 *
 * Return 0 if successful, or the return value of the failed video::s_stream
 * operation otherwise.
 */
static int iss_pipeline_enable(struct iss_pipeline *pipe,
			       enum iss_pipeline_stream_state mode)
{
	struct iss_device *iss = pipe->output->iss;
	struct media_entity *entity;
	struct media_pad *pad;
	struct v4l2_subdev *subdev;
	unsigned long flags;
	int ret;

	/* If one of the entities in the pipeline has crashed it will not work
	 * properly. Refuse to start streaming in that case. This check must be
	 * performed before the loop below to avoid starting entities if the
	 * pipeline won't start anyway (those entities would then likely fail to
	 * stop, making the problem worse).
	 */
	if (pipe->entities & iss->crashed)
		return -EIO;

	spin_lock_irqsave(&pipe->lock, flags);
	pipe->state &= ~(ISS_PIPELINE_IDLE_INPUT | ISS_PIPELINE_IDLE_OUTPUT);
	spin_unlock_irqrestore(&pipe->lock, flags);

	pipe->do_propagation = false;

	entity = &pipe->output->video.entity;
	while (1) {
		pad = &entity->pads[0];
		if (!(pad->flags & MEDIA_PAD_FL_SINK))
			break;

		pad = media_entity_remote_pad(pad);
		if (pad == NULL ||
		    media_entity_type(pad->entity) != MEDIA_ENT_T_V4L2_SUBDEV)
			break;

		entity = pad->entity;
		subdev = media_entity_to_v4l2_subdev(entity);

		ret = v4l2_subdev_call(subdev, video, s_stream, mode);
		if (ret < 0 && ret != -ENOIOCTLCMD)
			return ret;
	}
	iss_print_status(pipe->output->iss);
	return 0;
}
Code example #12
/**
 * __subdev_set_power - change power state of a single subdev
 * @sd: subdevice to change power state for
 * @on: 1 to enable power or 0 to disable
 *
 * Return result of s_power subdev operation or -ENXIO if sd argument
 * is NULL. Return 0 if the subdevice does not implement s_power.
 */
static int __subdev_set_power(struct v4l2_subdev *sd, int on)
{
	int *use_count;
	int ret;

	if (sd == NULL)
		return -ENXIO;

	use_count = &sd->entity.use_count;
	if (on && (*use_count)++ > 0)
		return 0;
	else if (!on && (*use_count == 0 || --(*use_count) > 0))
		return 0;
	ret = v4l2_subdev_call(sd, core, s_power, on);

	return ret != -ENOIOCTLCMD ? ret : 0;
}
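For illustration, a hypothetical caller: because __subdev_set_power() only forwards s_power on the first power-up and the last power-down, nested users can simply pair their calls. A minimal sketch under that assumption; example_with_subdev_powered() is a made-up name, not from the driver.

#include <media/v4l2-subdev.h>

/* Hedged sketch: balanced power-up/power-down around an I/O sequence,
 * relying on __subdev_set_power()'s use_count handling for nesting. */
static int example_with_subdev_powered(struct v4l2_subdev *sd)
{
	int ret;

	ret = __subdev_set_power(sd, 1);	/* first user actually powers up */
	if (ret)
		return ret;

	/* ... access the subdevice here ... */

	return __subdev_set_power(sd, 0);	/* last user actually powers down */
}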
Code example #13
int32_t msm_sensor_cci_i2c_util(struct msm_camera_i2c_client *client,
	uint16_t cci_cmd)
{
	int32_t rc = 0;
	struct msm_camera_cci_ctrl cci_ctrl;

	CDBG("%s line %d\n", __func__, __LINE__);
	cci_ctrl.cmd = cci_cmd;
	cci_ctrl.cci_info = client->cci_client;
	rc = v4l2_subdev_call(client->cci_client->cci_subdev,
			core, ioctl, VIDIOC_MSM_CCI_CFG, &cci_ctrl);
	if (rc < 0) {
		pr_err("%s line %d rc = %d\n", __func__, __LINE__, rc);
		return rc;
	}
	return cci_ctrl.status;
}
Code example #14
static int venc_is_second_field(struct vpbe_display *disp_dev)
{
	struct vpbe_device *vpbe_dev = disp_dev->vpbe_dev;
	int ret;
	int val = 0;	/* fall back to field 0 if the VENC query fails */

	ret = v4l2_subdev_call(vpbe_dev->venc,
			       core,
			       ioctl,
			       VENC_GET_FLD,
			       &val);
	if (ret < 0) {
		v4l2_err(&vpbe_dev->v4l2_dev,
			 "Error in getting Field ID 0\n");
	}
	return val;
}
Code example #15
File: gsc-core.c  Project: ArthySundaram/chromeos-kvm
int gsc_out_link_validate(const struct media_pad *source,
			  const struct media_pad *sink)
{
	struct v4l2_subdev_format src_fmt;
	struct v4l2_subdev_crop dst_crop;
	struct v4l2_subdev *sd;
	struct gsc_dev *gsc;
	struct gsc_frame *f;
	int ret;

	if (media_entity_type(source->entity) != MEDIA_ENT_T_V4L2_SUBDEV ||
	    media_entity_type(sink->entity) != MEDIA_ENT_T_V4L2_SUBDEV) {
		gsc_err("media entity type isn't subdev\n");
		return 0;
	}

	sd = media_entity_to_v4l2_subdev(source->entity);
	gsc = entity_data_to_gsc(v4l2_get_subdevdata(sd));
	f = &gsc->out.ctx->d_frame;

	src_fmt.format.width = f->crop.width;
	src_fmt.format.height = f->crop.height;
	src_fmt.format.code = f->fmt->mbus_code;

	sd = media_entity_to_v4l2_subdev(sink->entity);
	/* To check if G-Scaler destination size and Mixer destination size
	   are the same */
	dst_crop.pad = sink->index;
	dst_crop.which = V4L2_SUBDEV_FORMAT_ACTIVE;
	ret = v4l2_subdev_call(sd, pad, get_crop, NULL, &dst_crop);
	if (ret < 0 && ret != -ENOIOCTLCMD) {
		gsc_err("subdev get_crop failed\n");
		return -EPIPE;
	}

	if (src_fmt.format.width != dst_crop.rect.width ||
	    src_fmt.format.height != dst_crop.rect.height) {
		gsc_err("sink and source formats are different: "
			"src_fmt.w = %d, src_fmt.h = %d, "
			"dst_crop.w = %d, dst_crop.h = %d, rotation = %d",
			src_fmt.format.width, src_fmt.format.height,
			dst_crop.rect.width, dst_crop.rect.height,
			gsc->out.ctx->gsc_ctrls.rotate->val);
		return -EINVAL;
	}

	return 0;
}
Code example #16
File: mx1_camera.c  Project: 03199618/linux
static int mx1_camera_set_fmt(struct soc_camera_device *icd,
			      struct v4l2_format *f)
{
	struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
	const struct soc_camera_format_xlate *xlate;
	struct v4l2_pix_format *pix = &f->fmt.pix;
	struct v4l2_mbus_framefmt mf;
	int ret, buswidth;

	xlate = soc_camera_xlate_by_fourcc(icd, pix->pixelformat);
	if (!xlate) {
		dev_warn(icd->parent, "Format %x not found\n",
			 pix->pixelformat);
		return -EINVAL;
	}

	buswidth = xlate->host_fmt->bits_per_sample;
	if (buswidth > 8) {
		dev_warn(icd->parent,
			 "bits-per-sample %d for format %x unsupported\n",
			 buswidth, pix->pixelformat);
		return -EINVAL;
	}

	mf.width	= pix->width;
	mf.height	= pix->height;
	mf.field	= pix->field;
	mf.colorspace	= pix->colorspace;
	mf.code		= xlate->code;

	ret = v4l2_subdev_call(sd, video, s_mbus_fmt, &mf);
	if (ret < 0)
		return ret;

	if (mf.code != xlate->code)
		return -EINVAL;

	pix->width		= mf.width;
	pix->height		= mf.height;
	pix->field		= mf.field;
	pix->colorspace		= mf.colorspace;
	icd->current_fmt	= xlate;

	return ret;
}
Code example #17
File: rcar-v4l2.c  Project: AshishNamdev/linux
static int rvin_s_dv_timings(struct file *file, void *priv_fh,
			     struct v4l2_dv_timings *timings)
{
	struct rvin_dev *vin = video_drvdata(file);
	struct v4l2_subdev *sd = vin_to_source(vin);
	int ret;

	ret = v4l2_subdev_call(sd, video, s_dv_timings, timings);
	if (ret)
		return ret;

	vin->source.width = timings->bt.width;
	vin->source.height = timings->bt.height;
	vin->format.width = timings->bt.width;
	vin->format.height = timings->bt.height;

	return 0;
}
Code example #18
File: rcar-v4l2.c  Project: AshishNamdev/linux
static int rvin_s_edid(struct file *file, void *fh, struct v4l2_edid *edid)
{
	struct rvin_dev *vin = video_drvdata(file);
	struct v4l2_subdev *sd = vin_to_source(vin);
	int input, ret;

	if (edid->pad)
		return -EINVAL;

	input = edid->pad;
	edid->pad = vin->sink_pad_idx;

	ret = v4l2_subdev_call(sd, pad, set_edid, edid);

	edid->pad = input;

	return ret;
}
Code example #19
/*
 * crop capability is the max resolution both ISP and Sensor supported
 */
static int atomisp_cropcap(struct file *file, void *fh,
	struct v4l2_cropcap *cropcap)
{
	struct video_device *vdev = video_devdata(file);
	struct atomisp_device *isp = video_get_drvdata(vdev);
	struct atomisp_video_pipe *pipe = atomisp_to_video_pipe(vdev);
	struct v4l2_mbus_framefmt snr_mbus_fmt;
	int ret;

	if (cropcap->type != V4L2_BUF_TYPE_VIDEO_CAPTURE) {
		v4l2_err(&atomisp_dev, "unsupport v4l2 buf type\n");
		return -EINVAL;
	}

	/*Only capture node supports cropcap*/
	if (!pipe->is_main)
		return 0;

	cropcap->bounds.left = 0;
	cropcap->bounds.top = 0;

	snr_mbus_fmt.code = V4L2_MBUS_FMT_FIXED;
	snr_mbus_fmt.height = ATOM_ISP_MAX_HEIGHT_TMP;
	snr_mbus_fmt.width = ATOM_ISP_MAX_WIDTH_TMP;

	ret = v4l2_subdev_call(isp->inputs[isp->input_curr].camera,
			       video, try_mbus_fmt, &snr_mbus_fmt);
	if (ret) {
		v4l2_err(&atomisp_dev,
			"failed to try_mbus_fmt for sensor"
			", try try_fmt\n");
	} else {
		cropcap->bounds.width = snr_mbus_fmt.width;
		cropcap->bounds.height = snr_mbus_fmt.height;
		isp->snr_max_width = snr_mbus_fmt.width;
		isp->snr_max_height = snr_mbus_fmt.height;
		isp->snr_pixelformat = snr_mbus_fmt.code;
	}

	memcpy(&cropcap->defrect, &cropcap->bounds, sizeof(struct v4l2_rect));
	cropcap->pixelaspect.numerator = 1;
	cropcap->pixelaspect.denominator = 1;
	return 0;
}
Code example #20
File: camss-video.c  Project: ReneNyffenegger/linux
static int video_start_streaming(struct vb2_queue *q, unsigned int count)
{
	struct camss_video *video = vb2_get_drv_priv(q);
	struct video_device *vdev = &video->vdev;
	struct media_entity *entity;
	struct media_pad *pad;
	struct v4l2_subdev *subdev;
	int ret;

	ret = media_pipeline_start(&vdev->entity, &video->pipe);
	if (ret < 0)
		return ret;

	ret = video_check_format(video);
	if (ret < 0)
		goto error;

	entity = &vdev->entity;
	while (1) {
		pad = &entity->pads[0];
		if (!(pad->flags & MEDIA_PAD_FL_SINK))
			break;

		pad = media_entity_remote_pad(pad);
		if (!pad || !is_media_entity_v4l2_subdev(pad->entity))
			break;

		entity = pad->entity;
		subdev = media_entity_to_v4l2_subdev(entity);

		ret = v4l2_subdev_call(subdev, video, s_stream, 1);
		if (ret < 0 && ret != -ENOIOCTLCMD)
			goto error;
	}

	return 0;

error:
	media_pipeline_stop(&vdev->entity);

	video->ops->flush_buffers(video, VB2_BUF_STATE_QUEUED);

	return ret;
}
Code example #21
File: fimc-capture.c  Project: jerem/hi35xx-buildroot
static int fimc_stop_capture(struct fimc_dev *fimc)
{
    unsigned long flags;
    struct fimc_vid_cap *cap;
    struct fimc_vid_buffer *buf;

    cap = &fimc->vid_cap;

    if (!fimc_capture_active(fimc))
        return 0;

    spin_lock_irqsave(&fimc->slock, flags);
    set_bit(ST_CAPT_SHUT, &fimc->state);
    fimc_deactivate_capture(fimc);
    spin_unlock_irqrestore(&fimc->slock, flags);

    wait_event_timeout(fimc->irq_queue,
                       !test_bit(ST_CAPT_SHUT, &fimc->state),
                       FIMC_SHUTDOWN_TIMEOUT);

    v4l2_subdev_call(cap->sd, video, s_stream, 0);

    spin_lock_irqsave(&fimc->slock, flags);
    fimc->state &= ~(1 << ST_CAPT_RUN | 1 << ST_CAPT_PEND |
                     1 << ST_CAPT_SHUT | 1 << ST_CAPT_STREAM);

    fimc->vid_cap.active_buf_cnt = 0;

    /* Release buffers that were enqueued in the driver by videobuf2. */
    while (!list_empty(&cap->pending_buf_q)) {
        buf = pending_queue_pop(cap);
        vb2_buffer_done(&buf->vb, VB2_BUF_STATE_ERROR);
    }

    while (!list_empty(&cap->active_buf_q)) {
        buf = active_queue_pop(cap);
        vb2_buffer_done(&buf->vb, VB2_BUF_STATE_ERROR);
    }

    spin_unlock_irqrestore(&fimc->slock, flags);

    dbg("state: 0x%lx", fimc->state);
    return 0;
}
Code example #22
File: fimc-mdevice.c  Project: AllenDou/linux
/**
 * fimc_pipeline_s_stream - invoke s_stream on pipeline subdevs
 * @pipeline: video pipeline structure
 * @on: passed as the s_stream call argument
 */
int fimc_pipeline_s_stream(struct fimc_pipeline *p, bool on)
{
	int i, ret;

	if (p->subdevs[IDX_SENSOR] == NULL)
		return -ENODEV;

	for (i = 0; i < IDX_MAX; i++) {
		unsigned int idx = on ? (IDX_MAX - 1) - i : i;

		ret = v4l2_subdev_call(p->subdevs[idx], video, s_stream, on);

		if (ret < 0 && ret != -ENOIOCTLCMD && ret != -ENODEV)
			return ret;
	}

	return 0;
}
Code example #23
static int
v4l2_subdev_link_validate_get_format(struct media_pad *pad,
				     struct v4l2_subdev_format *fmt)
{
	if (media_entity_type(pad->entity) == MEDIA_ENT_T_V4L2_SUBDEV) {
		struct v4l2_subdev *sd =
			media_entity_to_v4l2_subdev(pad->entity);

		fmt->which = V4L2_SUBDEV_FORMAT_ACTIVE;
		fmt->pad = pad->index;
		return v4l2_subdev_call(sd, pad, get_fmt, NULL, fmt);
	}

	WARN(pad->entity->type != MEDIA_ENT_T_DEVNODE_V4L,
	     "Driver bug! Wrong media entity type 0x%08x, entity %s\n",
	     pad->entity->type, pad->entity->name);

	return -EINVAL;
}
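For context (an assumption based on the standard V4L2 helpers, not shown in the snippet): a get-format helper like this is normally reached through v4l2_subdev_link_validate(), which drivers install as their media entity link_validate operation so that the active formats on both ends of a link are fetched and compared. A minimal sketch of that wiring; the struct and function names are illustrative.

#include <media/media-entity.h>
#include <media/v4l2-subdev.h>

/* Hedged sketch: hook standard link validation into the media graph. */
static const struct media_entity_operations example_sd_media_ops = {
	.link_validate = v4l2_subdev_link_validate,
};

static void example_init_entity_ops(struct v4l2_subdev *sd)
{
	/* Called once during subdev setup (illustrative). */
	sd->entity.ops = &example_sd_media_ops;
}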
Code example #24
File: rcar-v4l2.c  Project: AshishNamdev/linux
static int __rvin_try_format_source(struct rvin_dev *vin,
				    u32 which,
				    struct v4l2_pix_format *pix,
				    struct rvin_source_fmt *source)
{
	struct v4l2_subdev *sd;
	struct v4l2_subdev_pad_config *pad_cfg;
	struct v4l2_subdev_format format = {
		.which = which,
	};
	enum v4l2_field field;
	int ret;

	sd = vin_to_source(vin);

	v4l2_fill_mbus_format(&format.format, pix, vin->digital.code);

	pad_cfg = v4l2_subdev_alloc_pad_config(sd);
	if (pad_cfg == NULL)
		return -ENOMEM;

	format.pad = vin->src_pad_idx;

	field = pix->field;

	ret = v4l2_subdev_call(sd, pad, set_fmt, pad_cfg, &format);
	if (ret < 0 && ret != -ENOIOCTLCMD)
		goto done;

	v4l2_fill_pix_format(pix, &format.format);

	pix->field = field;

	source->width = pix->width;
	source->height = pix->height;

	vin_dbg(vin, "Source resolution: %ux%u\n", source->width,
		source->height);

done:
	v4l2_subdev_free_pad_config(pad_cfg);
	return ret;
}
Code example #25
/* This function is called by open(), so we need to init the HW */
static int msm_isp_open(struct v4l2_subdev *sd,
	struct msm_cam_media_controller *mctl)
{
	/* init VFE and sensor, register sync callbacks for init */
	int rc = 0;
	D("%s\n", __func__);
	if (!mctl) {
		pr_err("%s: param is NULL", __func__);
		return -EINVAL;
	}

	rc = v4l2_subdev_call(sd, core, ioctl,
				VIDIOC_MSM_VFE_INIT, NULL);
	if (rc < 0) {
		pr_err("%s: vfe_init failed at %d\n",
			__func__, rc);
	}
	return rc;
}
Code example #26
File: camif-capture.c  Project: AkyZero/wrapfs-latest
static int sensor_set_power(struct camif_dev *camif, int on)
{
	struct cam_sensor *sensor = &camif->sensor;
	int err = 0;

	if (!on == camif->sensor.power_count)
		err = v4l2_subdev_call(sensor->sd, core, s_power, on);
	if (!err)
		sensor->power_count += on ? 1 : -1;

	pr_debug("on: %d, power_count: %d, err: %d\n",
		 on, sensor->power_count, err);

	return err;
}
Code example #27
int32_t msm_camera_cci_i2c_write_seq(struct msm_camera_i2c_client *client,
	uint32_t addr, uint8_t *data, uint32_t num_byte)
{
	int32_t rc = -EFAULT;
	uint8_t i = 0;
	struct msm_camera_cci_ctrl cci_ctrl;
	struct msm_camera_i2c_reg_array *reg_conf_tbl = NULL;

	if ((client->addr_type != MSM_CAMERA_I2C_BYTE_ADDR
		&& client->addr_type != MSM_CAMERA_I2C_WORD_ADDR)
		|| num_byte == 0)
		return rc;

	if (num_byte > I2C_SEQ_REG_DATA_MAX) {
		pr_err("%s: num_byte=%d clamped to max supported %d\n",
			__func__, num_byte, I2C_SEQ_REG_DATA_MAX);
		return rc;
	}

	S_I2C_DBG("%s reg addr = 0x%x num bytes: %d\n",
		__func__, addr, num_byte);

	reg_conf_tbl = kzalloc(num_byte *
		(sizeof(struct msm_camera_i2c_reg_array)), GFP_KERNEL);
	if (!reg_conf_tbl) {
		pr_err("%s:%d no memory\n", __func__, __LINE__);
		return -ENOMEM;
	}

	reg_conf_tbl[0].reg_addr = addr;
	for (i = 0; i < num_byte; i++) {
		reg_conf_tbl[i].reg_data = data[i];
		reg_conf_tbl[i].delay = 0;
	}
	cci_ctrl.cmd = MSM_CCI_I2C_WRITE_SEQ;
	cci_ctrl.cci_info = client->cci_client;
	cci_ctrl.cfg.cci_i2c_write_cfg.reg_setting = reg_conf_tbl;
	cci_ctrl.cfg.cci_i2c_write_cfg.data_type = MSM_CAMERA_I2C_BYTE_DATA;
	cci_ctrl.cfg.cci_i2c_write_cfg.addr_type = client->addr_type;
	cci_ctrl.cfg.cci_i2c_write_cfg.size = num_byte;
	rc = v4l2_subdev_call(client->cci_client->cci_subdev,
			core, ioctl, VIDIOC_MSM_CCI_CFG, &cci_ctrl);
	CDBG("%s line %d rc = %d\n", __func__, __LINE__, rc);
	rc = cci_ctrl.status;
	kfree(reg_conf_tbl);
	reg_conf_tbl = NULL;
	return rc;
}
Code example #28
File: omap1_camera.c  Project: andi34/Dhollmen_Kernel
static int omap1_cam_set_crop(struct soc_camera_device *icd,
			       struct v4l2_crop *crop)
{
	struct v4l2_rect *rect = &crop->c;
	const struct soc_camera_format_xlate *xlate = icd->current_fmt;
	struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
	struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent);
	struct omap1_cam_dev *pcdev = ici->priv;
	struct device *dev = icd->dev.parent;
	struct v4l2_mbus_framefmt mf;
	int ret;

	ret = subdev_call_with_sense(pcdev, dev, icd, sd, s_crop, crop);
	if (ret < 0) {
		dev_warn(dev, "%s: failed to crop to %ux%u@%u:%u\n", __func__,
			 rect->width, rect->height, rect->left, rect->top);
		return ret;
	}

	ret = v4l2_subdev_call(sd, video, g_mbus_fmt, &mf);
	if (ret < 0) {
		dev_warn(dev, "%s: failed to fetch current format\n", __func__);
		return ret;
	}

	ret = dma_align(&mf.width, &mf.height, xlate->host_fmt, pcdev->vb_mode,
			false);
	if (ret < 0) {
		dev_err(dev, "%s: failed to align %ux%u %s with DMA\n",
				__func__, mf.width, mf.height,
				xlate->host_fmt->name);
		return ret;
	}

	if (!ret) {
		/* sensor returned geometry not DMA aligned, trying to fix */
		ret = set_mbus_format(pcdev, dev, icd, sd, &mf, xlate);
		if (ret < 0) {
			dev_err(dev, "%s: failed to set format\n", __func__);
			return ret;
		}
	}

	icd->user_width	 = mf.width;
	icd->user_height = mf.height;

	return 0;
}
Code example #29
File: rcar-v4l2.c  Project: AshishNamdev/linux
static int rvin_enum_dv_timings(struct file *file, void *priv_fh,
				struct v4l2_enum_dv_timings *timings)
{
	struct rvin_dev *vin = video_drvdata(file);
	struct v4l2_subdev *sd = vin_to_source(vin);
	int pad, ret;

	pad = timings->pad;
	timings->pad = vin->sink_pad_idx;

	ret = v4l2_subdev_call(sd, pad, enum_dv_timings, timings);

	timings->pad = pad;

	return ret;
}
Code example #30
static int vpif_querystd(struct file *file, void *priv, v4l2_std_id *std_id)
{
	struct vpif_fh *fh = priv;
	struct channel_obj *ch = fh->channel;
	int ret = 0;

	vpif_dbg(2, debug, "vpif_querystd\n");

	
	ret = v4l2_subdev_call(vpif_obj.sd[ch->curr_sd_index], video,
				querystd, std_id);
	if (ret < 0)
		vpif_dbg(1, debug, "Failed to set standard for sub devices\n");

	return ret;
}