Example #1
/**
 * @brief: Called when the application requests buffers; sets up the camera buffers.
 *
 * @author: caolianming
 * @date: 2014-01-06
 * @param [in] *vq: V4L2 buffer queue information structure
 * @param [in,out] *count: requested/granted number of buffers
 * @param [out] *size: size of a single buffer in bytes
 */
static int ak_videobuf_setup(struct videobuf_queue *vq, unsigned int *count, 
								unsigned int *size)
{
	struct soc_camera_device *icd = vq->priv_data;
	int bytes_per_line = soc_mbus_bytes_per_line(icd->user_width,
						icd->current_fmt->host_fmt);

	/* assume a 12-bit-per-pixel layout (e.g. YUV420): 3/2 bytes per pixel */
	bytes_per_line = icd->user_width * 3 / 2;
	if (bytes_per_line < 0)
		return bytes_per_line;

	*size = bytes_per_line * icd->user_height;

	if (*count < 4) {
		printk("if use video mode, vbuf num isn't less than 4\n");
		*count = 4;
	}

	if (*size * *count > CONFIG_VIDEO_RESERVED_MEM_SIZE)
		*count = (CONFIG_VIDEO_RESERVED_MEM_SIZE) / *size;
	
	isp_dbg("%s count=%d, size=%d, bytes_per_line=%d\n",
			__func__, *count, *size, bytes_per_line);
	
	return 0;
}
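A setup handler like the one above only takes effect once it is registered with the videobuf framework. The following is a minimal sketch of that wiring, assuming companion queue/release handlers exist in the same driver; only the setup and prepare handlers actually appear in this listing.
/*
 * Sketch only (assumed wiring, not taken from the driver source):
 * ak_videobuf_queue() and ak_videobuf_release() are hypothetical names here.
 */
static struct videobuf_queue_ops ak_videobuf_ops = {
	.buf_setup	= ak_videobuf_setup,
	.buf_prepare	= ak_videobuf_prepare,
	.buf_queue	= ak_videobuf_queue,
	.buf_release	= ak_videobuf_release,
};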
static int unicam_videobuf_prepare(struct vb2_buffer *vb)
{
	struct soc_camera_device *icd = soc_camera_from_vb2q(vb->vb2_queue);
	int bytes_per_line = soc_mbus_bytes_per_line(icd->user_width,
						     icd->current_fmt->host_fmt);
	unsigned long size;

	pr_debug("-enter");
	if (bytes_per_line < 0)
		return bytes_per_line;

	pr_debug("vb=0x%p buf=0x%p, size=%lu", vb,
			(void *)vb2_plane_dma_addr(vb, 0),
			vb2_get_plane_payload(vb, 0));

	size = icd->user_height * bytes_per_line;

	if (vb2_plane_size(vb, 0) < size) {
		dev_err(icd->dev.parent, "Buffer too small (%lu < %lu)\n",
			vb2_plane_size(vb, 0), size);
		return -ENOBUFS;
	}
	vb2_set_plane_payload(vb, 0, size);

	pr_debug("-exit");
	return 0;
}
/* videobuf operations */
static int unicam_videobuf_setup(struct vb2_queue *vq, const struct v4l2_format *fmt,
				 unsigned int *count, unsigned int *numplanes,
				 unsigned int sizes[], void *alloc_ctxs[])
{
	struct soc_camera_device *icd = soc_camera_from_vb2q(vq);
	struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent);
	struct unicam_camera_dev *unicam_dev =
	    (struct unicam_camera_dev *)ici->priv;
	int bytes_per_line = soc_mbus_bytes_per_line(icd->user_width,
						     icd->current_fmt->host_fmt);

	pr_debug("-enter");

	if (bytes_per_line < 0)
		return bytes_per_line;

	*numplanes = 1;

	unicam_dev->sequence = 0;

	sizes[0] = bytes_per_line * icd->user_height;
#if defined(CONFIG_VIDEOBUF2_DMA_CONTIG)
	alloc_ctxs[0] = unicam_dev->alloc_ctx;
#endif

	if (!*count)
		*count = 2;

	pr_debug("no_of_buf=%d size=%u", *count, sizes[0]);

	pr_debug("-exit");
	return 0;
}
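The videobuf2-based hosts register their handlers through a struct vb2_ops table instead; below is a hedged sketch of how the two unicam handlers above would typically be slotted in (the other handler names are assumptions).
/*
 * Sketch only: apart from unicam_videobuf_setup() and
 * unicam_videobuf_prepare(), the handler names below are hypothetical.
 */
static struct vb2_ops unicam_vb2_ops = {
	.queue_setup		= unicam_videobuf_setup,
	.buf_prepare		= unicam_videobuf_prepare,
	.buf_queue		= unicam_videobuf_queue,
	.start_streaming	= unicam_videobuf_start_streaming,
	.stop_streaming		= unicam_videobuf_stop_streaming,
};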
Example #4
/*
 *  Videobuf operations
 */
static int omap1_videobuf_setup(struct videobuf_queue *vq, unsigned int *count,
		unsigned int *size)
{
	struct soc_camera_device *icd = vq->priv_data;
	int bytes_per_line = soc_mbus_bytes_per_line(icd->user_width,
			icd->current_fmt->host_fmt);
	struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent);
	struct omap1_cam_dev *pcdev = ici->priv;

	if (bytes_per_line < 0)
		return bytes_per_line;

	*size = bytes_per_line * icd->user_height;

	if (!*count || *count < OMAP1_CAMERA_MIN_BUF_COUNT(pcdev->vb_mode))
		*count = OMAP1_CAMERA_MIN_BUF_COUNT(pcdev->vb_mode);

	if (*size * *count > MAX_VIDEO_MEM * 1024 * 1024)
		*count = (MAX_VIDEO_MEM * 1024 * 1024) / *size;

	dev_dbg(icd->dev.parent,
			"%s: count=%d, size=%d\n", __func__, *count, *size);

	return 0;
}
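To make the size/count clamping above concrete, here is a rough worked example; the pixel format, resolution and memory budget are assumptions, not values taken from the driver.
/*
 * Assumed: 640x480 UYVY (2 bytes per pixel), 16 MiB video memory budget.
 *
 *   bytes_per_line = 640 * 2            =    1280
 *   *size          = 1280 * 480         =  614400 bytes per buffer
 *   budget         = 16 * 1024 * 1024   = 16777216 bytes
 *   max buffers    = 16777216 / 614400  =      27  (integer division)
 *
 * A request for 32 buffers would therefore be trimmed to 27.
 */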
Example #5
static int set_mbus_format(struct omap1_cam_dev *pcdev, struct device *dev,
		struct soc_camera_device *icd, struct v4l2_subdev *sd,
		struct v4l2_mbus_framefmt *mf,
		const struct soc_camera_format_xlate *xlate)
{
	s32 bytes_per_line;
	int ret = subdev_call_with_sense(pcdev, dev, icd, sd, s_mbus_fmt, mf);

	if (ret < 0) {
		dev_err(dev, "%s: s_mbus_fmt failed\n", __func__);
		return ret;
	}

	if (mf->code != xlate->code) {
		dev_err(dev, "%s: unexpected pixel code change\n", __func__);
		return -EINVAL;
	}

	bytes_per_line = soc_mbus_bytes_per_line(mf->width, xlate->host_fmt);
	if (bytes_per_line < 0) {
		dev_err(dev, "%s: soc_mbus_bytes_per_line() failed\n",
				__func__);
		return bytes_per_line;
	}

	if (!is_dma_aligned(bytes_per_line, mf->height, pcdev->vb_mode)) {
		dev_err(dev, "%s: resulting geometry %ux%u not DMA aligned\n",
				__func__, mf->width, mf->height);
		return -EINVAL;
	}
	return 0;
}
Example #6
/*
 * Calculate the __buffer__ (not data) size and number of buffers.
 */
static int mx3_videobuf_setup(struct vb2_queue *vq,
			unsigned int *count, unsigned int *num_planes,
			unsigned long sizes[], void *alloc_ctxs[])
{
	struct soc_camera_device *icd = soc_camera_from_vb2q(vq);
	struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent);
	struct mx3_camera_dev *mx3_cam = ici->priv;
	int bytes_per_line = soc_mbus_bytes_per_line(icd->user_width,
						icd->current_fmt->host_fmt);

	if (bytes_per_line < 0)
		return bytes_per_line;

	if (!mx3_cam->idmac_channel[0])
		return -EINVAL;

	*num_planes = 1;

	mx3_cam->sequence = 0;
	sizes[0] = bytes_per_line * icd->user_height;
	alloc_ctxs[0] = mx3_cam->alloc_ctx;

	if (!*count)
		*count = 32;

	if (sizes[0] * *count > MAX_VIDEO_MEM * 1024 * 1024)
		*count = MAX_VIDEO_MEM * 1024 * 1024 / sizes[0];

	return 0;
}
Example #7
/*
 * .queue_setup() is called to check whether the driver can accept the requested
 * number of buffers and to fill in plane sizes for the current frame format if
 * required
 */
static int rcar_vin_videobuf_setup(struct vb2_queue *vq,
				   const struct v4l2_format *fmt,
				   unsigned int *count,
				   unsigned int *num_planes,
				   unsigned int sizes[], void *alloc_ctxs[])
{
	struct soc_camera_device *icd = soc_camera_from_vb2q(vq);
	struct soc_camera_host *ici = to_soc_camera_host(icd->parent);
	struct rcar_vin_priv *priv = ici->priv;

	if (fmt) {
		const struct soc_camera_format_xlate *xlate;
		unsigned int bytes_per_line;
		int ret;

		xlate = soc_camera_xlate_by_fourcc(icd,
						   fmt->fmt.pix.pixelformat);
		if (!xlate)
			return -EINVAL;
		ret = soc_mbus_bytes_per_line(fmt->fmt.pix.width,
					      xlate->host_fmt);
		if (ret < 0)
			return ret;

		bytes_per_line = max_t(u32, fmt->fmt.pix.bytesperline, ret);

		ret = soc_mbus_image_size(xlate->host_fmt, bytes_per_line,
					  fmt->fmt.pix.height);
		if (ret < 0)
			return ret;

		sizes[0] = max_t(u32, fmt->fmt.pix.sizeimage, ret);
	} else {
		/* Called from VIDIOC_REQBUFS or in compatibility mode */
		sizes[0] = icd->sizeimage;
	}

	alloc_ctxs[0] = priv->alloc_ctx;

	if (!vq->num_buffers)
		priv->sequence = 0;

	if (!*count)
		*count = 2;
	priv->vb_count = *count;

	*num_planes = 1;

	/* Number of hardware slots */
	if (is_continuous_transfer(priv))
		priv->nr_hw_slots = MAX_BUFFER_NUM;
	else
		priv->nr_hw_slots = 1;

	dev_dbg(icd->parent, "count=%d, size=%u\n", *count, sizes[0]);

	return 0;
}
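The max_t() calls in the fmt branch above let userspace ask for a larger stride or image size than the computed minimum; a hedged numeric illustration follows (all values assumed).
/*
 * Assumed: 1280x720 UYVY (2 bytes per pixel).
 *
 *   soc_mbus_bytes_per_line() -> 2560   (minimum stride)
 *   fmt->fmt.pix.bytesperline  = 2688   (padded stride from userspace)
 *   bytes_per_line             = max(2688, 2560) = 2688
 *
 *   soc_mbus_image_size()     -> 2688 * 720 = 1935360
 *   fmt->fmt.pix.sizeimage     = 2097152 (2 MiB requested)
 *   sizes[0]                   = max(2097152, 1935360) = 2097152
 */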
Example #8
static int mx3_camera_try_fmt(struct soc_camera_device *icd,
			      struct v4l2_format *f)
{
	struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
	const struct soc_camera_format_xlate *xlate;
	struct v4l2_pix_format *pix = &f->fmt.pix;
	struct v4l2_mbus_framefmt mf;
	__u32 pixfmt = pix->pixelformat;
	int ret;

	xlate = soc_camera_xlate_by_fourcc(icd, pixfmt);
	if (pixfmt && !xlate) {
		dev_warn(icd->dev.parent, "Format %x not found\n", pixfmt);
		return -EINVAL;
	}

	/* limit to MX3 hardware capabilities */
	if (pix->height > 4096)
		pix->height = 4096;
	if (pix->width > 4096)
		pix->width = 4096;

	pix->bytesperline = soc_mbus_bytes_per_line(pix->width,
						    xlate->host_fmt);
	if (pix->bytesperline < 0)
		return pix->bytesperline;
	pix->sizeimage = pix->height * pix->bytesperline;

	/* limit to sensor capabilities */
	mf.width	= pix->width;
	mf.height	= pix->height;
	mf.field	= pix->field;
	mf.colorspace	= pix->colorspace;
	mf.code		= xlate->code;

	ret = v4l2_subdev_call(sd, video, try_mbus_fmt, &mf);
	if (ret < 0)
		return ret;

	pix->width	= mf.width;
	pix->height	= mf.height;
	pix->colorspace	= mf.colorspace;

	switch (mf.field) {
	case V4L2_FIELD_ANY:
		pix->field = V4L2_FIELD_NONE;
		break;
	case V4L2_FIELD_NONE:
		break;
	default:
		dev_err(icd->dev.parent, "Field type %d unsupported.\n",
			mf.field);
		ret = -EINVAL;
	}

	return ret;
}
static int mx1_videobuf_prepare(struct videobuf_queue *vq,
		struct videobuf_buffer *vb, enum v4l2_field field)
{
	struct soc_camera_device *icd = vq->priv_data;
	struct mx1_buffer *buf = container_of(vb, struct mx1_buffer, vb);
	int ret;
	int bytes_per_line = soc_mbus_bytes_per_line(icd->user_width,
						icd->current_fmt->host_fmt);

	if (bytes_per_line < 0)
		return bytes_per_line;

	dev_dbg(icd->parent, "%s (vb=0x%p) 0x%08lx %d\n", __func__,
		vb, vb->baddr, vb->bsize);

	WARN_ON(!list_empty(&vb->queue));

	BUG_ON(NULL == icd->current_fmt);

	buf->inwork = 1;

	if (buf->code	!= icd->current_fmt->code ||
	    vb->width	!= icd->user_width ||
	    vb->height	!= icd->user_height ||
	    vb->field	!= field) {
		buf->code	= icd->current_fmt->code;
		vb->width	= icd->user_width;
		vb->height	= icd->user_height;
		vb->field	= field;
		vb->state	= VIDEOBUF_NEEDS_INIT;
	}

	vb->size = bytes_per_line * vb->height;
	if (0 != vb->baddr && vb->bsize < vb->size) {
		ret = -EINVAL;
		goto out;
	}

	if (vb->state == VIDEOBUF_NEEDS_INIT) {
		ret = videobuf_iolock(vq, vb, NULL);
		if (ret)
			goto fail;

		vb->state = VIDEOBUF_PREPARED;
	}

	buf->inwork = 0;

	return 0;

fail:
	free_buffer(vq, buf);
out:
	buf->inwork = 0;
	return ret;
}
Example #10
static int mx2_videobuf_prepare(struct videobuf_queue *vq,
		struct videobuf_buffer *vb, enum v4l2_field field)
{
	struct soc_camera_device *icd = vq->priv_data;
	struct mx2_buffer *buf = container_of(vb, struct mx2_buffer, vb);
	int bytes_per_line = soc_mbus_bytes_per_line(icd->user_width,
			icd->current_fmt->host_fmt);
	int ret = 0;

	dev_dbg(icd->parent, "%s (vb=0x%p) 0x%08lx %d\n", __func__,
		vb, vb->baddr, vb->bsize);

	if (bytes_per_line < 0)
		return bytes_per_line;

#ifdef DEBUG
	/*
	 * This can be useful if you want to see if we actually fill
	 * the buffer with something
	 */
	memset((void *)vb->baddr, 0xaa, vb->bsize);
#endif

	if (buf->code	!= icd->current_fmt->code ||
	    vb->width	!= icd->user_width ||
	    vb->height	!= icd->user_height ||
	    vb->field	!= field) {
		buf->code	= icd->current_fmt->code;
		vb->width	= icd->user_width;
		vb->height	= icd->user_height;
		vb->field	= field;
		vb->state	= VIDEOBUF_NEEDS_INIT;
	}

	vb->size = bytes_per_line * vb->height;
	if (vb->baddr && vb->bsize < vb->size) {
		ret = -EINVAL;
		goto out;
	}

	if (vb->state == VIDEOBUF_NEEDS_INIT) {
		ret = videobuf_iolock(vq, vb, NULL);
		if (ret)
			goto fail;

		vb->state = VIDEOBUF_PREPARED;
	}

	return 0;

fail:
	free_buffer(vq, buf);
out:
	return ret;
}
Example #11
static int omap1_videobuf_prepare(struct videobuf_queue *vq,
		struct videobuf_buffer *vb, enum v4l2_field field)
{
	struct soc_camera_device *icd = vq->priv_data;
	struct omap1_cam_buf *buf = container_of(vb, struct omap1_cam_buf, vb);
	int bytes_per_line = soc_mbus_bytes_per_line(icd->user_width,
			icd->current_fmt->host_fmt);
	struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent);
	struct omap1_cam_dev *pcdev = ici->priv;
	int ret;

	if (bytes_per_line < 0)
		return bytes_per_line;

	WARN_ON(!list_empty(&vb->queue));

	BUG_ON(NULL == icd->current_fmt);

	buf->inwork = 1;

	if (buf->code != icd->current_fmt->code || vb->field != field ||
			vb->width  != icd->user_width ||
			vb->height != icd->user_height) {
		buf->code  = icd->current_fmt->code;
		vb->width  = icd->user_width;
		vb->height = icd->user_height;
		vb->field  = field;
		vb->state  = VIDEOBUF_NEEDS_INIT;
	}

	vb->size = bytes_per_line * vb->height;

	if (vb->baddr && vb->bsize < vb->size) {
		ret = -EINVAL;
		goto out;
	}

	if (vb->state == VIDEOBUF_NEEDS_INIT) {
		ret = videobuf_iolock(vq, vb, NULL);
		if (ret)
			goto fail;

		vb->state = VIDEOBUF_PREPARED;
	}
	buf->inwork = 0;

	return 0;
fail:
	free_buffer(vq, buf, pcdev->vb_mode);
out:
	buf->inwork = 0;
	return ret;
}
Example #12
static int mx3_videobuf_prepare(struct vb2_buffer *vb)
{
	struct soc_camera_device *icd = soc_camera_from_vb2q(vb->vb2_queue);
	struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent);
	struct mx3_camera_dev *mx3_cam = ici->priv;
	struct idmac_channel *ichan = mx3_cam->idmac_channel[0];
	struct scatterlist *sg;
	struct mx3_camera_buffer *buf;
	size_t new_size;
	int bytes_per_line = soc_mbus_bytes_per_line(icd->user_width,
						icd->current_fmt->host_fmt);

	if (bytes_per_line < 0)
		return bytes_per_line;

	buf = to_mx3_vb(vb);
	sg = &buf->sg;

	new_size = bytes_per_line * icd->user_height;

	if (vb2_plane_size(vb, 0) < new_size) {
		dev_err(icd->dev.parent, "Buffer too small (%lu < %zu)\n",
			vb2_plane_size(vb, 0), new_size);
		return -ENOBUFS;
	}

	if (buf->state == CSI_BUF_NEEDS_INIT) {
		sg_dma_address(sg)	= vb2_dma_contig_plane_paddr(vb, 0);
		sg_dma_len(sg)		= new_size;

		buf->txd = ichan->dma_chan.device->device_prep_slave_sg(
			&ichan->dma_chan, sg, 1, DMA_FROM_DEVICE,
			DMA_PREP_INTERRUPT);
		if (!buf->txd)
			return -EIO;

		buf->txd->callback_param	= buf->txd;
		buf->txd->callback		= mx3_cam_dma_done;

		buf->state = CSI_BUF_PREPARED;
	}

	vb2_set_plane_payload(vb, 0, new_size);

	return 0;
}
Example #13
/*
 *  Videobuf operations
 */
static int mx2_videobuf_setup(struct videobuf_queue *vq, unsigned int *count,
			      unsigned int *size)
{
	struct soc_camera_device *icd = vq->priv_data;
	int bytes_per_line = soc_mbus_bytes_per_line(icd->user_width,
			icd->current_fmt->host_fmt);

	dev_dbg(icd->parent, "count=%d, size=%d\n", *count, *size);

	if (bytes_per_line < 0)
		return bytes_per_line;

	*size = bytes_per_line * icd->user_height;

	if (0 == *count)
		*count = 32;
	if (*size * *count > MAX_VIDEO_MEM * 1024 * 1024)
		*count = (MAX_VIDEO_MEM * 1024 * 1024) / *size;

	return 0;
}
Example #14
static int dma_align(int *width, int *height,
		const struct soc_mbus_pixelfmt *fmt,
		enum omap1_cam_vb_mode vb_mode, bool enlarge)
{
	s32 bytes_per_line = soc_mbus_bytes_per_line(*width, fmt);

	if (bytes_per_line < 0)
		return bytes_per_line;

	if (!is_dma_aligned(bytes_per_line, *height, vb_mode)) {
		unsigned int pxalign = __fls(bytes_per_line / *width);
		unsigned int salign  = DMA_FRAME_SHIFT(vb_mode) +
				DMA_ELEMENT_SHIFT - pxalign;
		unsigned int incr    = enlarge << salign;

		v4l_bound_align_image(width, 1, *width + incr, 0,
				height, 1, *height + incr, 0, salign);
		return 0;
	}
	return 1;
}
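The return convention of dma_align() (negative errno on failure, 0 when the geometry was not aligned and has been adjusted, 1 when it was already aligned) is easy to misread; here is a hypothetical caller sketch, not taken from the driver.
/* Hypothetical caller illustrating the three possible outcomes. */
int width = 642, height = 481, ret;

ret = dma_align(&width, &height, fmt, vb_mode, false);
if (ret < 0)
	return ret;		/* soc_mbus_bytes_per_line() failed */
if (ret == 0)
	pr_debug("geometry adjusted to %dx%d for DMA alignment\n",
		 width, height);
/* ret == 1: already DMA-aligned, width/height untouched */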
static s32 vi_bypass_bytes_per_line(u32 width,
				    const struct soc_mbus_pixelfmt *mf)
{
	return soc_mbus_bytes_per_line(width, mf);
}
Example #16
static void mx3_videobuf_queue(struct vb2_buffer *vb)
{
	struct soc_camera_device *icd = soc_camera_from_vb2q(vb->vb2_queue);
	struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent);
	struct mx3_camera_dev *mx3_cam = ici->priv;
	struct mx3_camera_buffer *buf = to_mx3_vb(vb);
	struct dma_async_tx_descriptor *txd = buf->txd;
	struct idmac_channel *ichan = to_idmac_chan(txd->chan);
	struct idmac_video_param *video = &ichan->params.video;
	dma_cookie_t cookie;
	u32 fourcc = icd->current_fmt->host_fmt->fourcc;
	unsigned long flags;

	/* This is the configuration of one sg-element */
	video->out_pixel_fmt	= fourcc_to_ipu_pix(fourcc);

	if (video->out_pixel_fmt == IPU_PIX_FMT_GENERIC) {
		/*
		 * If the IPU DMA channel is configured to transport
		 * generic 8-bit data, we have to set up correctly the
		 * geometry parameters upon the current pixel format.
		 * So, since the DMA horizontal parameters are expressed
		 * in bytes not pixels, convert these in the right unit.
		 */
		int bytes_per_line = soc_mbus_bytes_per_line(icd->user_width,
						icd->current_fmt->host_fmt);
		BUG_ON(bytes_per_line <= 0);

		video->out_width	= bytes_per_line;
		video->out_height	= icd->user_height;
		video->out_stride	= bytes_per_line;
	} else {
		/*
		 * For IPU known formats the pixel unit will be managed
		 * successfully by the IPU code
		 */
		video->out_width	= icd->user_width;
		video->out_height	= icd->user_height;
		video->out_stride	= icd->user_width;
	}

#ifdef DEBUG
	/* helps to see what DMA actually has written */
	if (vb2_plane_vaddr(vb, 0))
		memset(vb2_plane_vaddr(vb, 0), 0xaa, vb2_get_plane_payload(vb, 0));
#endif

	spin_lock_irqsave(&mx3_cam->lock, flags);
	list_add_tail(&buf->queue, &mx3_cam->capture);

	if (!mx3_cam->active)
		mx3_cam->active = buf;

	spin_unlock_irq(&mx3_cam->lock);

	cookie = txd->tx_submit(txd);
	dev_dbg(icd->dev.parent, "Submitted cookie %d DMA 0x%08x\n",
		cookie, sg_dma_address(&buf->sg));

	if (cookie >= 0)
		return;

	spin_lock_irq(&mx3_cam->lock);

	/* Submit error */
	list_del_init(&buf->queue);

	if (mx3_cam->active == buf)
		mx3_cam->active = NULL;

	spin_unlock_irqrestore(&mx3_cam->lock, flags);
	vb2_buffer_done(vb, VB2_BUF_STATE_ERROR);
}
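The IPU_PIX_FMT_GENERIC branch above expresses the horizontal geometry in bytes rather than pixels; a short illustration with an assumed 16-bit format and frame size:
/*
 * Assumed: 16-bit-per-pixel media bus format, 640x480 frame, transported as
 * generic 8-bit data by the IDMAC channel.
 *
 *   bytes_per_line = soc_mbus_bytes_per_line(640, host_fmt) = 1280
 *
 *   generic path:  out_width = out_stride = 1280 (byte units),  out_height = 480
 *   known format:  out_width = out_stride =  640 (pixel units), out_height = 480
 */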
Example #17
static int mx3_camera_set_fmt(struct soc_camera_device *icd,
			      struct v4l2_format *f)
{
	struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent);
	struct mx3_camera_dev *mx3_cam = ici->priv;
	struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
	const struct soc_camera_format_xlate *xlate;
	struct v4l2_pix_format *pix = &f->fmt.pix;
	struct v4l2_mbus_framefmt mf;
	int ret;

	xlate = soc_camera_xlate_by_fourcc(icd, pix->pixelformat);
	if (!xlate) {
		dev_warn(icd->dev.parent, "Format %x not found\n",
			 pix->pixelformat);
		return -EINVAL;
	}

	stride_align(&pix->width);
	dev_dbg(icd->dev.parent, "Set format %dx%d\n", pix->width, pix->height);

	/*
	 * Might have to perform a complete interface initialisation like in
	 * ipu_csi_init_interface() in mxc_v4l2_s_param(). Also consider
	 * mxc_v4l2_s_fmt()
	 */

	configure_geometry(mx3_cam, pix->width, pix->height, xlate->code);

	mf.width	= pix->width;
	mf.height	= pix->height;
	mf.field	= pix->field;
	mf.colorspace	= pix->colorspace;
	mf.code		= xlate->code;

	ret = v4l2_subdev_call(sd, video, s_mbus_fmt, &mf);
	if (ret < 0)
		return ret;

	if (mf.code != xlate->code)
		return -EINVAL;

	if (!mx3_cam->idmac_channel[0]) {
		ret = acquire_dma_channel(mx3_cam);
		if (ret < 0)
			return ret;
	}

	pix->width		= mf.width;
	pix->height		= mf.height;
	pix->field		= mf.field;
	mx3_cam->field		= mf.field;
	pix->colorspace		= mf.colorspace;
	icd->current_fmt	= xlate;

	pix->bytesperline = soc_mbus_bytes_per_line(pix->width,
						    xlate->host_fmt);
	if (pix->bytesperline < 0)
		return pix->bytesperline;
	pix->sizeimage = pix->height * pix->bytesperline;

	dev_dbg(icd->dev.parent, "Sensor set %dx%d\n", pix->width, pix->height);

	return ret;
}
Example #18
static int jz4780_videobuf_prepare(struct videobuf_queue *vq,
		struct videobuf_buffer *vb, enum v4l2_field field)
{
	struct soc_camera_device *icd = vq->priv_data;
	struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent);
	struct jz4780_camera_dev *pcdev = ici->priv;
	struct jz4780_buffer *buf = container_of(vb, struct jz4780_buffer, vb);
	int ret;
	int bytes_per_line = soc_mbus_bytes_per_line(icd->user_width,
						icd->current_fmt->host_fmt);

	if (bytes_per_line < 0)
		return bytes_per_line;

	dprintk(7, "%s (vb=0x%p) 0x%08lx %d\n", __func__,
		vb, vb->baddr, vb->bsize);

	/* Added list head initialization on alloc */
	WARN_ON(!list_empty(&vb->queue));

	BUG_ON(NULL == icd->current_fmt);

	/*
	 * I think, in buf_prepare you only have to protect global data,
	 * the actual buffer is yours
	 */
	buf->inwork = 1;

	if (buf->code	!= icd->current_fmt->code ||
	    vb->width	!= icd->user_width ||
	    vb->height	!= icd->user_height ||
	    vb->field	!= field) {
		buf->code	= icd->current_fmt->code;
		vb->width	= icd->user_width;
		vb->height	= icd->user_height;
		vb->field	= field;
		vb->state	= VIDEOBUF_NEEDS_INIT;
	}

	vb->size = bytes_per_line * vb->height;
	if (0 != vb->baddr && vb->bsize < vb->size) {
		ret = -EINVAL;
		goto out;
	}

	if (vb->state == VIDEOBUF_NEEDS_INIT) {
		if (pcdev->is_tlb_enabled == 0) {
			ret = videobuf_iolock(vq, vb, NULL);
			if (ret) {
				dprintk(3, "%s error!\n", __FUNCTION__);
				goto fail;
			}
		}
		vb->state = VIDEOBUF_PREPARED;
	}
	buf->inwork = 0;
	return 0;

fail:
	free_buffer(vq, buf);
out:
	buf->inwork = 0;
	return ret;
}
/* should be called with unicam_dev->lock held */
static int unicam_camera_update_buf(struct unicam_camera_dev *unicam_dev)
{

	struct v4l2_subdev *sd = soc_camera_to_subdev(unicam_dev->icd);
	struct buffer_desc im0, dat0;
	dma_addr_t phys_addr;
	unsigned int line_stride;
	struct v4l2_format thumb_fmt;
	struct v4l2_pix_format *pix;
	int thumb = 0, ret;

	pr_debug("-enter");

	if (!unicam_dev->active) {
		pr_err("%s: Invalid buffer to Update", __func__);
		return -ENOMEM;
	}

	phys_addr = vb2_plane_dma_addr(unicam_dev->active, 0);
	pr_debug("updating buffer phys=0x%p", (void *)phys_addr);
	if (!phys_addr) {
		unicam_dev->active = NULL;
		pr_err("No valid address. skip capture\n");
		return -ENOMEM;
	}
	/* For crop use-cases only the line stride matters, and only for non-JPEG formats */

	/* stride is in bytes */
	if (unicam_dev->icd->current_fmt->code != V4L2_MBUS_FMT_JPEG_1X8) {
		if ((unicam_dev->crop.c.top == 0) || (unicam_dev->crop.c.left == 0)) {
			/* A zero top or left offset means the crop is not
			   centered; reject such crop attempts */
			line_stride =
			    soc_mbus_bytes_per_line(unicam_dev->icd->user_width,
						    unicam_dev->icd->current_fmt->host_fmt);
		} else {
			line_stride =
			    soc_mbus_bytes_per_line(unicam_dev->crop.c.width,
						    unicam_dev->icd->current_fmt->host_fmt);
		}
		/* Non JPEG section of the code */
		/* image 0 */
		im0.start = (UInt32) phys_addr;
		im0.ls = (UInt32) line_stride;
		im0.size = line_stride * unicam_dev->icd->user_height;
		im0.wrap_en = 1;

		/* Coverity Fix: Dead Code */
		/* if(unicam_dev->b_mode == BUFFER_DOUBLE && phys_addr1){ */
			/* image 1 */
		/*	im1.start = phys_addr1;
			im1.ls = im0.ls;
			im1.size = im0.size;
			mm_csi0_update_addr(&im0, &im1, NULL, NULL);
		} else { */
		mm_csi0_update_addr(&im0, NULL, NULL, NULL);
		pr_debug("Adr 0x%x ls 0x%x size 0x%x\n", im0.start,
				im0.ls, im0.size);
		/* } */
	} else {
		/* JPEG section always in DAT0 */
		/* check whether sensor supports thumbnail */
		ret =
		    v4l2_subdev_call(sd, core, ioctl, VIDIOC_THUMB_SUPPORTED,
				     (void *)&thumb);

		if ((!ret) && thumb) {

			ret =
			    v4l2_subdev_call(sd, core, ioctl,
					     VIDIOC_THUMB_G_FMT,
					     (void *)&thumb_fmt);
			if (ret < 0) {
				dev_err(unicam_dev->dev,
					"sensor driver should report thumbnail format\n");
				return -1;
			}
			/* image 0 */
			pix = &thumb_fmt.fmt.pix;
			line_stride = unicam_dev->icd->user_width;
			im0.start = (UInt32) phys_addr;
			im0.ls = (UInt32) pix->bytesperline;
			im0.size = pix->sizeimage;
			im0.wrap_en = 1;

			/* DAT0 to an address after THUMB */
			dat0.start = (UInt32) ((char *)phys_addr + pix->sizeimage);
			dat0.ls = line_stride;
			dat0.size = line_stride * unicam_dev->icd->user_height * 3/2;
			mm_csi0_update_addr(&im0, NULL, &dat0, NULL);
		} else {
			/* no thumbnail supported */
			/* don't set image0 since we are expecting data0
			 * to contain jpeg data
			 */
			dat0.start = (UInt32) phys_addr;
			dat0.ls = unicam_dev->icd->user_width;
			dat0.size = unicam_dev->icd->user_width * unicam_dev->icd->user_height * 3/2;
		}
	}
	pr_debug("-exit");
	return 0;
}
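When the sensor reports a thumbnail, the JPEG path above places the thumbnail image and the JPEG payload back to back in the same buffer; a rough layout sketch derived from the code, under its own 12-bits-per-pixel sizing assumption:
/*
 * One capture buffer at phys_addr:
 *
 *   +----------------------------+  phys_addr
 *   | thumbnail (IMAGE0)         |  size = pix->sizeimage,
 *   |                            |  stride = pix->bytesperline
 *   +----------------------------+  phys_addr + pix->sizeimage
 *   | JPEG payload (DATA0)       |  size = user_width * user_height * 3 / 2,
 *   |                            |  stride = user_width
 *   +----------------------------+
 *
 * Without thumbnail support, DATA0 starts directly at phys_addr.
 */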
/* should be called with unicam_dev->lock held */
static int unicam_camera_update_buf(struct unicam_camera_dev *unicam_dev)
{

	struct v4l2_subdev *sd = soc_camera_to_subdev(unicam_dev->icd);
	CSL_CAM_BUFFER_st_t cslCamBuffer0;
	CSL_CAM_BUFFER_st_t cslCamBuffer1;
	CSL_CAM_BUFFER_st_t cslCamBufferData0;
	CSL_CAM_BUFFER_st_t cslCamBufferData1;
	dma_addr_t phys_addr;
	unsigned int line_stride;
	struct v4l2_format thumb_fmt;
	struct v4l2_pix_format *pix;
	int thumb = 0, ret;

	dprintk("-enter");

	if (!unicam_dev->active) {
		pr_debug("%s unicam_camera_update_buf no active buffer found:"
			" WARNING\n", __func__);
		return -ENOMEM;
	}

	phys_addr = vb2_dma_contig_plane_dma_addr(unicam_dev->active, 0);

	dprintk("updating buffer phys=0x%p", (void *)phys_addr);

	/* stride is in bytes */
	if (unicam_dev->icd->current_fmt->code != V4L2_MBUS_FMT_JPEG_1X8) {
		line_stride =
		    soc_mbus_bytes_per_line(unicam_dev->icd->user_width,
					    unicam_dev->icd->current_fmt->host_fmt);
		/* image 0 */
		cslCamBuffer0.start_addr = (UInt32) phys_addr;
		cslCamBuffer0.line_stride = (UInt32) line_stride;
		cslCamBuffer0.size = line_stride * unicam_dev->icd->user_height;
		cslCamBuffer0.buffer_wrap_en = 1;
		cslCamBuffer0.mem_type = CSL_CAM_MEM_TYPE_NONE;

		dprintk
		    ("cslCamBuffer0 start_addr = 0x%x, line_stride = %d, size = %d \n",
		     cslCamBuffer0.start_addr, cslCamBuffer0.line_stride,
		     cslCamBuffer0.size);
	} else {
		/* check whether sensor supports thumbnail */
		ret =
		    v4l2_subdev_call(sd, core, ioctl, VIDIOC_THUMB_SUPPORTED,
				     (void *)&thumb);

		if ((!ret) && thumb) {

			ret =
			    v4l2_subdev_call(sd, core, ioctl,
					     VIDIOC_THUMB_G_FMT,
					     (void *)&thumb_fmt);
			if (ret < 0) {
				dev_err(unicam_dev->dev,
					"sensor driver should report thumbnail format\n");
				return -1;
			}
			/* image 0 */
			pix = &thumb_fmt.fmt.pix;
			cslCamBuffer0.start_addr = (UInt32) phys_addr;
			cslCamBuffer0.line_stride = (UInt32) pix->bytesperline;
			cslCamBuffer0.size = pix->sizeimage;
			cslCamBuffer0.buffer_wrap_en = 1;
			cslCamBuffer0.mem_type = CSL_CAM_MEM_TYPE_NONE;
		} else {
			/* no thumbnail supported */
			/* don't set image0 since we are expecting data0
			 * to contain jpeg data
			 */
			cslCamBuffer0.start_addr = 0;
			cslCamBuffer0.line_stride = 0;
			cslCamBuffer0.size = 0;
			cslCamBuffer0.buffer_wrap_en = 0;
			cslCamBuffer0.mem_type = CSL_CAM_MEM_TYPE_NONE;
		}
	}

	/* image 1 */
	cslCamBuffer1.start_addr = 0;
	cslCamBuffer1.line_stride = cslCamBuffer0.line_stride;
	cslCamBuffer1.size = cslCamBuffer0.size;
	cslCamBuffer1.buffer_wrap_en = cslCamBuffer0.buffer_wrap_en;
	cslCamBuffer1.mem_type = cslCamBuffer0.mem_type;

	/* set data buffer 0 */
	if (unicam_dev->icd->current_fmt->code != V4L2_MBUS_FMT_JPEG_1X8) {
		cslCamBufferData0.start_addr = (UInt32) 0;
		cslCamBufferData0.line_stride = 0;
		cslCamBufferData0.buffer_wrap_en = 0;
		cslCamBufferData0.size = 0;
		cslCamBufferData0.mem_type = cslCamBuffer0.mem_type;
	} else {

		pix = &thumb_fmt.fmt.pix;
		line_stride = unicam_dev->icd->user_width;
		/* check if thumbnail is supported */
		if (thumb)
			cslCamBufferData0.start_addr =
			    (UInt32) ((char *)phys_addr + pix->sizeimage);
		else
			cslCamBufferData0.start_addr = (UInt32) phys_addr;

		cslCamBufferData0.line_stride = (UInt32) line_stride;
		/* assume 12bpp */
		cslCamBufferData0.size =
		    (line_stride * unicam_dev->icd->user_height * 3 / 2);
		cslCamBufferData0.buffer_wrap_en = 1;
		cslCamBufferData0.mem_type = CSL_CAM_MEM_TYPE_NONE;
		dprintk
		    ("cslCamBufferData0 start_addr = 0x%x, line_stride = %d, size = %d\n",
		     cslCamBufferData0.start_addr,
		     cslCamBufferData0.line_stride, cslCamBufferData0.size);
	}

	/* set data buffer 1 */
	cslCamBufferData1.start_addr = 0;

	if (cslCamBuffer1.start_addr != 0) {
		if (csl_cam_set_input_addr
		    (unicam_dev->cslCamHandle, &cslCamBuffer0, &cslCamBuffer1,
		     &cslCamBufferData0)) {

			dev_err(unicam_dev->dev,
				"csl_cam_set_input_addr(): FAILED\n");
			return -1;
		}
	} else {
		if (csl_cam_set_input_addr
		    (unicam_dev->cslCamHandle, &cslCamBuffer0, NULL,
		     &cslCamBufferData0)) {

			dev_err(unicam_dev->dev,
				"CamCslSetInputIntf(): csl_cam_set_input_addr(): FAILED\n");
			return -1;
		}
	}
	dprintk("-exit");
	return 0;
}
Example #21
static int mx2_camera_set_bus_param(struct soc_camera_device *icd,
		__u32 pixfmt)
{
	struct soc_camera_host *ici =
		to_soc_camera_host(icd->dev.parent);
	struct mx2_camera_dev *pcdev = ici->priv;
	unsigned long camera_flags, common_flags;
	int ret = 0;
	int bytesperline;
	u32 csicr1 = pcdev->csicr1;

	camera_flags = icd->ops->query_bus_param(icd);

	common_flags = soc_camera_bus_param_compatible(camera_flags,
				MX2_BUS_FLAGS);
	if (!common_flags)
		return -EINVAL;

	if ((common_flags & SOCAM_HSYNC_ACTIVE_HIGH) &&
	    (common_flags & SOCAM_HSYNC_ACTIVE_LOW)) {
		if (pcdev->platform_flags & MX2_CAMERA_HSYNC_HIGH)
			common_flags &= ~SOCAM_HSYNC_ACTIVE_LOW;
		else
			common_flags &= ~SOCAM_HSYNC_ACTIVE_HIGH;
	}

	if ((common_flags & SOCAM_PCLK_SAMPLE_RISING) &&
	    (common_flags & SOCAM_PCLK_SAMPLE_FALLING)) {
		if (pcdev->platform_flags & MX2_CAMERA_PCLK_SAMPLE_RISING)
			common_flags &= ~SOCAM_PCLK_SAMPLE_FALLING;
		else
			common_flags &= ~SOCAM_PCLK_SAMPLE_RISING;
	}

	ret = icd->ops->set_bus_param(icd, common_flags);
	if (ret < 0)
		return ret;

	if (common_flags & SOCAM_PCLK_SAMPLE_RISING)
		csicr1 |= CSICR1_REDGE;
	if (common_flags & SOCAM_PCLK_SAMPLE_FALLING)
		csicr1 |= CSICR1_INV_PCLK;
	if (common_flags & SOCAM_VSYNC_ACTIVE_HIGH)
		csicr1 |= CSICR1_SOF_POL;
	if (common_flags & SOCAM_HSYNC_ACTIVE_HIGH)
		csicr1 |= CSICR1_HSYNC_POL;
	if (pcdev->platform_flags & MX2_CAMERA_SWAP16)
		csicr1 |= CSICR1_SWAP16_EN;
	if (pcdev->platform_flags & MX2_CAMERA_EXT_VSYNC)
		csicr1 |= CSICR1_EXT_VSYNC;
	if (pcdev->platform_flags & MX2_CAMERA_CCIR)
		csicr1 |= CSICR1_CCIR_EN;
	if (pcdev->platform_flags & MX2_CAMERA_CCIR_INTERLACE)
		csicr1 |= CSICR1_CCIR_MODE;
	if (pcdev->platform_flags & MX2_CAMERA_GATED_CLOCK)
		csicr1 |= CSICR1_GCLK_MODE;
	if (pcdev->platform_flags & MX2_CAMERA_INV_DATA)
		csicr1 |= CSICR1_INV_DATA;
	if (pcdev->platform_flags & MX2_CAMERA_PACK_DIR_MSB)
		csicr1 |= CSICR1_PACK_DIR;

	pcdev->csicr1 = csicr1;

	bytesperline = soc_mbus_bytes_per_line(icd->user_width,
			icd->current_fmt->host_fmt);
	if (bytesperline < 0)
		return bytesperline;

	if (mx27_camera_emma(pcdev)) {
		ret = mx27_camera_emma_prp_reset(pcdev);
		if (ret)
			return ret;

		if (pcdev->discard_buffer)
			dma_free_coherent(ici->v4l2_dev.dev,
				pcdev->discard_size, pcdev->discard_buffer,
				pcdev->discard_buffer_dma);

		/*
		 * I didn't manage to properly enable/disable the prp
		 * on a per frame basis during running transfers,
		 * thus we allocate a buffer here and use it to
		 * discard frames when no buffer is available.
		 * Feel free to work on this ;)
		 */
		pcdev->discard_size = icd->user_height * bytesperline;
		pcdev->discard_buffer = dma_alloc_coherent(ici->v4l2_dev.dev,
				pcdev->discard_size, &pcdev->discard_buffer_dma,
				GFP_KERNEL);
		if (!pcdev->discard_buffer)
			return -ENOMEM;

		mx27_camera_emma_buf_init(icd, bytesperline);
	} else if (cpu_is_mx25()) {
		writel((bytesperline * icd->user_height) >> 2,
				pcdev->base_csi + CSIRXCNT);
		writel((bytesperline << 16) | icd->user_height,
				pcdev->base_csi + CSIIMAG_PARA);
	}

	writel(pcdev->csicr1, pcdev->base_csi + CSICR1);

	return 0;
}
Example #22
static int mx2_camera_try_fmt(struct soc_camera_device *icd,
				  struct v4l2_format *f)
{
	struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
	const struct soc_camera_format_xlate *xlate;
	struct v4l2_pix_format *pix = &f->fmt.pix;
	struct v4l2_mbus_framefmt mf;
	__u32 pixfmt = pix->pixelformat;
	unsigned int width_limit;
	int ret;

	xlate = soc_camera_xlate_by_fourcc(icd, pixfmt);
	if (pixfmt && !xlate) {
		dev_warn(icd->parent, "Format %x not found\n", pixfmt);
		return -EINVAL;
	}

	/* FIXME: implement MX27 limits */

	/* limit to MX25 hardware capabilities */
	if (cpu_is_mx25()) {
		if (xlate->host_fmt->bits_per_sample <= 8)
			width_limit = 0xffff * 4;
		else
			width_limit = 0xffff * 2;
		/* CSIIMAG_PARA limit */
		if (pix->width > width_limit)
			pix->width = width_limit;
		if (pix->height > 0xffff)
			pix->height = 0xffff;

		pix->bytesperline = soc_mbus_bytes_per_line(pix->width,
				xlate->host_fmt);
		if (pix->bytesperline < 0)
			return pix->bytesperline;
		pix->sizeimage = pix->height * pix->bytesperline;
		/* Check against the CSIRXCNT limit */
		if (pix->sizeimage > 4 * 0x3ffff) {
			/* Adjust geometry, preserve aspect ratio */
			unsigned int new_height = int_sqrt(4 * 0x3ffff *
					pix->height / pix->bytesperline);
			pix->width = new_height * pix->width / pix->height;
			pix->height = new_height;
			pix->bytesperline = soc_mbus_bytes_per_line(pix->width,
							xlate->host_fmt);
			BUG_ON(pix->bytesperline < 0);
		}
	}

	/* limit to sensor capabilities */
	mf.width	= pix->width;
	mf.height	= pix->height;
	mf.field	= pix->field;
	mf.colorspace	= pix->colorspace;
	mf.code		= xlate->code;

	ret = v4l2_subdev_call(sd, video, try_mbus_fmt, &mf);
	if (ret < 0)
		return ret;

	if (mf.field == V4L2_FIELD_ANY)
		mf.field = V4L2_FIELD_NONE;
	if (mf.field != V4L2_FIELD_NONE) {
		dev_err(icd->parent, "Field type %d unsupported.\n",
				mf.field);
		return -EINVAL;
	}

	pix->width	= mf.width;
	pix->height	= mf.height;
	pix->field	= mf.field;
	pix->colorspace	= mf.colorspace;

	return 0;
}
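The CSIRXCNT clamp above shrinks the frame while roughly preserving its aspect ratio; a worked example with assumed numbers:
/*
 * Assumed: 1920x1080 UYVY (2 bytes per pixel), limit = 4 * 0x3ffff = 1048572.
 *
 *   bytesperline = 1920 * 2    = 3840
 *   sizeimage    = 1080 * 3840 = 4147200   (> 1048572, must shrink)
 *
 *   new_height = int_sqrt(1048572 * 1080 / 3840) = int_sqrt(294910) = 543
 *   new width  = 543 * 1920 / 1080               = 965
 *
 *   recomputed: bytesperline = 1930, sizeimage = 543 * 1930 = 1047990,
 *   which now fits under the limit.
 */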
Example #23
static int mx2_camera_set_bus_param(struct soc_camera_device *icd,
		__u32 pixfmt)
{
	struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
	struct soc_camera_host *ici = to_soc_camera_host(icd->parent);
	struct mx2_camera_dev *pcdev = ici->priv;
	struct v4l2_mbus_config cfg = {.type = V4L2_MBUS_PARALLEL,};
	unsigned long common_flags;
	int ret;
	int bytesperline;
	u32 csicr1 = pcdev->csicr1;

	ret = v4l2_subdev_call(sd, video, g_mbus_config, &cfg);
	if (!ret) {
		common_flags = soc_mbus_config_compatible(&cfg, MX2_BUS_FLAGS);
		if (!common_flags) {
			dev_warn(icd->parent,
				 "Flags incompatible: camera 0x%x, host 0x%x\n",
				 cfg.flags, MX2_BUS_FLAGS);
			return -EINVAL;
		}
	} else if (ret != -ENOIOCTLCMD) {
		return ret;
	} else {
		common_flags = MX2_BUS_FLAGS;
	}

	if ((common_flags & V4L2_MBUS_HSYNC_ACTIVE_HIGH) &&
	    (common_flags & V4L2_MBUS_HSYNC_ACTIVE_LOW)) {
		if (pcdev->platform_flags & MX2_CAMERA_HSYNC_HIGH)
			common_flags &= ~V4L2_MBUS_HSYNC_ACTIVE_LOW;
		else
			common_flags &= ~V4L2_MBUS_HSYNC_ACTIVE_HIGH;
	}

	if ((common_flags & V4L2_MBUS_PCLK_SAMPLE_RISING) &&
	    (common_flags & V4L2_MBUS_PCLK_SAMPLE_FALLING)) {
		if (pcdev->platform_flags & MX2_CAMERA_PCLK_SAMPLE_RISING)
			common_flags &= ~V4L2_MBUS_PCLK_SAMPLE_FALLING;
		else
			common_flags &= ~V4L2_MBUS_PCLK_SAMPLE_RISING;
	}

	cfg.flags = common_flags;
	ret = v4l2_subdev_call(sd, video, s_mbus_config, &cfg);
	if (ret < 0 && ret != -ENOIOCTLCMD) {
		dev_dbg(icd->parent, "camera s_mbus_config(0x%lx) returned %d\n",
			common_flags, ret);
		return ret;
	}

	if (common_flags & V4L2_MBUS_PCLK_SAMPLE_RISING)
		csicr1 |= CSICR1_REDGE;
	if (common_flags & V4L2_MBUS_VSYNC_ACTIVE_HIGH)
		csicr1 |= CSICR1_SOF_POL;
	if (common_flags & V4L2_MBUS_HSYNC_ACTIVE_HIGH)
		csicr1 |= CSICR1_HSYNC_POL;
	if (pcdev->platform_flags & MX2_CAMERA_SWAP16)
		csicr1 |= CSICR1_SWAP16_EN;
	if (pcdev->platform_flags & MX2_CAMERA_EXT_VSYNC)
		csicr1 |= CSICR1_EXT_VSYNC;
	if (pcdev->platform_flags & MX2_CAMERA_CCIR)
		csicr1 |= CSICR1_CCIR_EN;
	if (pcdev->platform_flags & MX2_CAMERA_CCIR_INTERLACE)
		csicr1 |= CSICR1_CCIR_MODE;
	if (pcdev->platform_flags & MX2_CAMERA_GATED_CLOCK)
		csicr1 |= CSICR1_GCLK_MODE;
	if (pcdev->platform_flags & MX2_CAMERA_INV_DATA)
		csicr1 |= CSICR1_INV_DATA;
	if (pcdev->platform_flags & MX2_CAMERA_PACK_DIR_MSB)
		csicr1 |= CSICR1_PACK_DIR;

	pcdev->csicr1 = csicr1;

	bytesperline = soc_mbus_bytes_per_line(icd->user_width,
			icd->current_fmt->host_fmt);
	if (bytesperline < 0)
		return bytesperline;

	if (mx27_camera_emma(pcdev)) {
		ret = mx27_camera_emma_prp_reset(pcdev);
		if (ret)
			return ret;

		if (pcdev->discard_buffer)
			dma_free_coherent(ici->v4l2_dev.dev,
				pcdev->discard_size, pcdev->discard_buffer,
				pcdev->discard_buffer_dma);

		/*
		 * I didn't manage to properly enable/disable the prp
		 * on a per frame basis during running transfers,
		 * thus we allocate a buffer here and use it to
		 * discard frames when no buffer is available.
		 * Feel free to work on this ;)
		 */
		pcdev->discard_size = icd->user_height * bytesperline;
		pcdev->discard_buffer = dma_alloc_coherent(ici->v4l2_dev.dev,
				pcdev->discard_size, &pcdev->discard_buffer_dma,
				GFP_KERNEL);
		if (!pcdev->discard_buffer)
			return -ENOMEM;

		mx27_camera_emma_buf_init(icd, bytesperline);
	} else if (cpu_is_mx25()) {
		writel((bytesperline * icd->user_height) >> 2,
				pcdev->base_csi + CSIRXCNT);
		writel((bytesperline << 16) | icd->user_height,
				pcdev->base_csi + CSIIMAG_PARA);
	}

	writel(pcdev->csicr1, pcdev->base_csi + CSICR1);

	return 0;
}
Example #24
/**
 * @brief: Called when the application requests buffers; prepares a camera buffer.
 *
 * @author: caolianming
 * @date: 2014-01-06
 * @param [in] *vq: V4L2 buffer queue information structure
 * @param [in] *vb: V4L2 buffer information structure
 * @param [in] field: V4L2_FIELD_ANY
 */
static int ak_videobuf_prepare(struct videobuf_queue *vq,
			struct videobuf_buffer *vb, enum v4l2_field field)
{
	struct soc_camera_device *icd = vq->priv_data;
	struct ak_buffer *buf = container_of(vb, struct ak_buffer, vb);
	int ret;
	int bytes_per_line = soc_mbus_bytes_per_line(icd->user_width,
						icd->current_fmt->host_fmt);

	isp_dbg("%s (vb=0x%p) buf[%d] vb->baddr=0x%08lx vb->bsize=%d bytes_per_line=%d\n",
			__func__, vb, vb->i, vb->baddr, vb->bsize, bytes_per_line);

	/* assume a 12-bit-per-pixel layout (e.g. YUV420): 3/2 bytes per pixel */
	bytes_per_line = icd->user_width * 3 / 2;

	if (bytes_per_line < 0)
		return bytes_per_line;

	/* Added list head initialization on alloc */
	WARN_ON(!list_empty(&vb->queue));

#if 0
//#ifdef ISP_DEBUG
	/*
	 * This can be useful if you want to see if we actually fill
	 * the buffer with something
	 */
	memset((void *)vb->baddr, 0xaa, vb->bsize);
#endif

	BUG_ON(NULL == icd->current_fmt);
	
	/* I think, in buf_prepare you only have to protect global data,
	 * the actual buffer is yours */
	buf->inwork = 1;
	
	if (buf->code	!= icd->current_fmt->code ||
	    vb->width	!= icd->user_width ||
	    vb->height	!= icd->user_height ||
	    vb->field	!= field) {
		buf->code	= icd->current_fmt->code;
		vb->width	= icd->user_width;
		vb->height	= icd->user_height;
		vb->field	= field;
		vb->state	= VIDEOBUF_NEEDS_INIT;
	}

	vb->size = bytes_per_line * vb->height;
	if (0 != vb->baddr && vb->bsize < vb->size) {
		ret = -EINVAL;
		goto out;
	}

	if (vb->state == VIDEOBUF_NEEDS_INIT) {
		ret = videobuf_iolock(vq, vb, NULL);
		if (ret)
			goto fail;

		vb->state = VIDEOBUF_PREPARED;
	}

	buf->inwork = 0;

	return 0;
	
fail:
	free_buffer(vq, buf);
out:
	buf->inwork = 0;
	return ret;
}