Example No. 1
static int fimc_isp_subdev_s_stream(struct v4l2_subdev *sd, int on)
{
	struct fimc_isp *isp = v4l2_get_subdevdata(sd);
	struct fimc_is *is = fimc_isp_to_is(isp);
	int ret;

	isp_dbg(1, sd, "%s: on: %d\n", __func__, on);

	if (!test_bit(IS_ST_INIT_DONE, &is->state))
		return -EBUSY;

	fimc_is_mem_barrier();

	if (on) {
		if (__get_pending_param_count(is)) {
			ret = fimc_is_itf_s_param(is, true);
			if (ret < 0)
				return ret;
		}

		isp_dbg(1, sd, "changing mode to %d\n", is->config_index);

		ret = fimc_is_itf_mode_change(is);
		if (ret)
			return -EINVAL;

		clear_bit(IS_ST_STREAM_ON, &is->state);
		fimc_is_hw_stream_on(is);
		ret = fimc_is_wait_event(is, IS_ST_STREAM_ON, 1,
					 FIMC_IS_CONFIG_TIMEOUT);
		if (ret < 0) {
			v4l2_err(sd, "stream on timeout\n");
			return ret;
		}
	} else {
		clear_bit(IS_ST_STREAM_OFF, &is->state);
		fimc_is_hw_stream_off(is);
		ret = fimc_is_wait_event(is, IS_ST_STREAM_OFF, 1,
					 FIMC_IS_CONFIG_TIMEOUT);
		if (ret < 0) {
			v4l2_err(sd, "stream off timeout\n");
			return ret;
		}
		is->setfile.sub_index = 0;
	}

	return 0;
}
Example No. 2
/**
 * @brief: helper for ak_videobuf_release(); frees the buffer once the camera has stopped.
 * 
 * @author: caolianming
 * @date: 2014-01-06
 * @param [in] *vq: V4L2 buffer queue information structure
 * @param [in] *buf: ak camera buffer structure wrapping struct videobuf_buffer
 */
static void free_buffer(struct videobuf_queue *vq, struct ak_buffer *buf)
{
	struct soc_camera_device *icd = vq->priv_data;
	struct videobuf_buffer *vb = &buf->vb;
	struct soc_camera_host *ici = to_soc_camera_host(icd->parent);
	struct ak_camera_dev *pcdev = ici->priv;	

	isp_dbg("%s (vb=0x%p) buf[%d] 0x%08lx %d\n", 
			__func__, vb, vb->i, vb->baddr, vb->bsize);
	
	BUG_ON(in_interrupt());

	/* This waits until this buffer is out of danger, i.e., until it is no
	 * longer in STATE_QUEUED or STATE_ACTIVE */
	if (vb->state == VIDEOBUF_ACTIVE && !pcdev->dma_running) {
		printk("free_buffer: dma_running=%d, doesn't neee to wait\n", pcdev->dma_running);
		//vb->state = VIDEOBUF_ERROR;
		list_del(&vb->queue);
	} else {
		vb->state = VIDEOBUF_DONE;
		videobuf_waiton(vq, vb, 0, 0);
	}
	videobuf_dma_contig_free(vq, vb);

	vb->state = VIDEOBUF_NEEDS_INIT;
}
Example No. 3
static int ak_camera_set_crop(struct soc_camera_device *icd,
			       struct v4l2_crop *crop)
{
	struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
	struct soc_camera_host *ici = to_soc_camera_host(icd->parent);
	struct ak_camera_dev *pcdev = ici->priv;
	int ret, width, height;

	isp_dbg("entry %s\n", __func__);
	
	if (pcdev->dma_running) {
		/* make sure streaming is not started */
		v4l2_err(&ici->v4l2_dev,
			"Cannot change crop when streaming is ON\n");
		return -EBUSY;
	}

	width = crop->c.width - crop->c.left;
	height = crop->c.height - crop->c.top;
	if ((crop->c.top < 0 || crop->c.left < 0)
		||(((width * 3) < 18) || (height * 3) < 18)
		||((width > 1280) || (height > 720))) {
		v4l2_err(&ici->v4l2_dev,
			"doesn't support negative values for top & left\n");
		return -EINVAL;
	}

	if ((ret = isp_set_crop(&pcdev->isp, crop->c)) < 0)
		ret = v4l2_subdev_call(sd, video, s_crop, crop);

	return ret;
}
Example No. 4
/**
 * @brief: videobuf release callback; frees the buffer if the camera has stopped.
 * 
 * @author: caolianming
 * @date: 2014-01-06
 * @param [in] *vq: V4L2 buffer queue information structure
 * @param [in] *vb: V4L2  buffer information structure
 */
static void ak_videobuf_release(struct videobuf_queue *vq, 
					struct videobuf_buffer *vb)
{
	struct ak_buffer *buf = container_of(vb, struct ak_buffer, vb);	
	struct soc_camera_device *icd = vq->priv_data;
//	struct device *dev = icd->parent;
	struct soc_camera_host *ici = to_soc_camera_host(icd->parent);
	struct ak_camera_dev *pcdev = ici->priv;
	unsigned long flags;
	
	isp_dbg("%s (vb=0x%p) buf[%d] 0x%08lx %d\n", 
			__func__, vb, vb->i, vb->baddr, vb->bsize);

	spin_lock_irqsave(&pcdev->lock, flags);
	isp_clear_irq(&pcdev->isp);
	spin_unlock_irqrestore(&pcdev->lock, flags);

	switch (vb->state) {
	case VIDEOBUF_ACTIVE:
		CAMDBG("vb status: ACTIVE\n");
		break;
	case VIDEOBUF_QUEUED:
		CAMDBG("vb status: QUEUED\n");
		break;
	case VIDEOBUF_PREPARED:
		CAMDBG("vb status: PREPARED\n");
		break;
	default:
		CAMDBG("vb status: unknown\n");
		break;
	}

	free_buffer(vq, buf);
}
Example No. 5
/**
 * @brief: called when the application requests buffers; sets up the buffer count and size.
 * 
 * @author: caolianming
 * @date: 2014-01-06
 * @param [in] *vq: V4L2 buffer queue information structure
 * @param [in,out] *count: number of buffers requested/granted
 * @param [out] *size: size of one buffer, in bytes
 */
static int ak_videobuf_setup(struct videobuf_queue *vq, unsigned int *count, 
								unsigned int *size)
{
	struct soc_camera_device *icd = vq->priv_data;
	int bytes_per_line = soc_mbus_bytes_per_line(icd->user_width,
						icd->current_fmt->host_fmt);

	/* override: frames are laid out as YUV 4:2:0, i.e. 3/2 bytes per pixel */
	bytes_per_line = icd->user_width * 3 / 2;
	if (bytes_per_line < 0)
		return bytes_per_line;

	*size = bytes_per_line * icd->user_height;

	if (*count < 4) {
		printk("if use video mode, vbuf num isn't less than 4\n");
		*count = 4;
	}

	if (*size * *count > CONFIG_VIDEO_RESERVED_MEM_SIZE)
		*count = (CONFIG_VIDEO_RESERVED_MEM_SIZE) / *size;
	
	isp_dbg("%s count=%d, size=%d, bytes_per_line=%d\n",
			__func__, *count, *size, bytes_per_line);
	
	return 0;
}
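The bytes_per_line override above multiplies the width by 3/2; assuming this reflects the YUV 4:2:0 semi-planar frame layout used elsewhere in the driver (a full-size luma plane plus a half-size chroma plane), the per-frame size works out as in the following minimal sketch (the helper name is illustrative only):

static inline unsigned int yuv420_frame_bytes(unsigned int width,
					      unsigned int height)
{
	unsigned int luma   = width * height;	/* 8-bit Y plane */
	unsigned int chroma = luma / 2;		/* CbCr, subsampled 2x2 */

	return luma + chroma;			/* == width * height * 3 / 2 */
}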
Example No. 6
static int fimc_isp_subdev_set_fmt(struct v4l2_subdev *sd,
				   struct v4l2_subdev_fh *fh,
				   struct v4l2_subdev_format *fmt)
{
	struct fimc_isp *isp = v4l2_get_subdevdata(sd);
	struct fimc_is *is = fimc_isp_to_is(isp);
	struct v4l2_mbus_framefmt *mf = &fmt->format;
	int ret = 0;

	isp_dbg(1, sd, "%s: pad%d: code: 0x%x, %dx%d\n",
		 __func__, fmt->pad, mf->code, mf->width, mf->height);

	mutex_lock(&isp->subdev_lock);
	__isp_subdev_try_format(isp, fh, fmt);

	if (fmt->which == V4L2_SUBDEV_FORMAT_TRY) {
		mf = v4l2_subdev_get_try_format(fh, fmt->pad);
		*mf = fmt->format;

		/* Propagate format to the source pads */
		if (fmt->pad == FIMC_ISP_SD_PAD_SINK) {
			struct v4l2_subdev_format format = *fmt;
			unsigned int pad;

			for (pad = FIMC_ISP_SD_PAD_SRC_FIFO;
					pad < FIMC_ISP_SD_PADS_NUM; pad++) {
				format.pad = pad;
				__isp_subdev_try_format(isp, fh, &format);
				mf = v4l2_subdev_get_try_format(fh, pad);
				*mf = format.format;
			}
		}
	} else {
		if (sd->entity.stream_count == 0) {
			if (fmt->pad == FIMC_ISP_SD_PAD_SINK) {
				struct v4l2_subdev_format format = *fmt;

				isp->sink_fmt = *mf;

				format.pad = FIMC_ISP_SD_PAD_SRC_DMA;
				__isp_subdev_try_format(isp, fh, &format);

				isp->src_fmt = format.format;
				__is_set_frame_size(is, &isp->src_fmt);
			} else {
				isp->src_fmt = *mf;
			}
		} else {
			ret = -EBUSY;
		}
	}

	mutex_unlock(&isp->subdev_lock);
	return ret;
}
Example No. 7
/**
 * @brief: fill in the camera capabilities queried by userspace.
 * 
 * @author: caolianming
 * @date: 2014-01-06
 * @param [in] *ici: soc_camera_host information structure. 
 * @param [in] *cap: v4l2_capability information structure.
 */
static int ak_camera_querycap(struct soc_camera_host *ici,
			       struct v4l2_capability *cap)
{
	isp_dbg("entry %s\n", __func__);
	
	/* cap->name is set by the friendly caller:-> */
	strlcpy(cap->card, ak_cam_driver_description, sizeof(cap->card));
	cap->capabilities = V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_STREAMING;
	
	return 0;	
}
Example No. 8
static int ak_camera_get_crop(struct soc_camera_device *icd,
			       struct v4l2_crop *crop)
{
	struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
	//struct soc_camera_host *ici = to_soc_camera_host(icd->parent);
	//struct ak_camera_dev *pcdev = ici->priv;

	isp_dbg("entry %s\n", __func__);
	
	return v4l2_subdev_call(sd, video, g_crop, crop);
}
Example No. 9
static int ak_camera_cropcap(struct soc_camera_device *icd, 
					struct v4l2_cropcap *crop)
{
	struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
	
	isp_dbg("enter %s\n", __func__);

	if (crop->type != V4L2_BUF_TYPE_VIDEO_CAPTURE)
		return -EINVAL;

	// The ISP supports cropping; support for it here still needs to be completed.
	return v4l2_subdev_call(sd, video, cropcap, crop);
}
Example No. 10
/**
 * @brief: standard ISP controls are handled here:
 * image adjustment, color effects, etc.
 * 
 * @author: caolianming
 * @date: 2014-01-06
 * @param [in] *ctrl: V4L2 image effect control information structure
 */
static int ak_camera_s_ctrl(struct v4l2_ctrl *ctrl)
{
	struct v4l2_control control;
	struct soc_camera_device *icd = ctrl_to_icd(ctrl);
	struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
	struct soc_camera_host *ici = to_soc_camera_host(icd->parent);
	struct ak_camera_dev *pcdev = ici->priv;
	
	isp_dbg("entry %s\n", __func__);

	switch (ctrl->id) {
	case V4L2_CID_BRIGHTNESS:
		if (isp_set_brightness(&pcdev->isp, ctrl) == 0)
			return 0;
		break;
	case V4L2_CID_CONTRAST:
		if (isp_set_gamma(&pcdev->isp, ctrl) == 0)
			return 0;
		break;
	case V4L2_CID_SATURATION:
		if (isp_set_saturation(&pcdev->isp, ctrl) == 0)
			return 0;
		break;
	case V4L2_CID_SHARPNESS:
		if (isp_set_sharpness(&pcdev->isp, ctrl) == 0)
			return 0;
		break;
	case V4L2_CID_HUE:
		break;
	case V4L2_CID_HUE_AUTO:
		break;
	case V4L2_CID_COLORFX:
		if (isp_set_uspecial_effect(&pcdev->isp, ctrl, 0) == 0)
			return 0;
		break;
	case V4L2_CID_DO_WHITE_BALANCE:
		if (isp_manu_set_wb_param(&pcdev->isp, ctrl, 0) == 0)
			return 0;
		break;
	case V4L2_CID_AUTO_WHITE_BALANCE:
		if (isp_auto_set_wb_param(&pcdev->isp, ctrl) == 0)
			return 0;
		break;
	}
	
	control.id = ctrl->id;
	control.value = ctrl->val;
	v4l2_subdev_call(sd, core, s_ctrl, &control);
	
	return 0;
}
Example No. 11
/**
 * @brief: called from the IRQ path to program the DMA addresses for the next frame
 * 
 * @author: caolianming
 * @date: 2014-01-06
 * @param [in] *pcdev: ak camera driver structure, includes the soc camera structure
 */
static int ak_camera_setup_dma(struct ak_camera_dev *pcdev)
{
	struct videobuf_buffer *vb_active = &pcdev->active->vb;
	struct videobuf_buffer *vb;
	struct list_head *next;
	unsigned long yaddr_chl1_active, yaddr_chl2_active; 
	unsigned long yaddr_chl1_next, yaddr_chl2_next;
	int size;

	size = vb_active->width * vb_active->height;
	yaddr_chl1_active = videobuf_to_dma_contig(vb_active);
	yaddr_chl2_active = yaddr_chl1_active + size * 3 / 2;

	/* for single mode */
	if (!isp_is_continuous(&pcdev->isp)) {
		isp_set_even_frame(&pcdev->isp, yaddr_chl1_active, yaddr_chl2_active);
		isp_update_regtable(&pcdev->isp, 1);
		isp_start_capturing(&pcdev->isp);
		return 0;
	}

	/* ISP is in the continuous mode */
	next = pcdev->capture.next;
	next = next->next;
	if (next == &pcdev->capture) {
		isp_dbg("irq: the next vbuf is empty.\n");
		//isp_stop_capturing(&pcdev->isp);
		irq_buf_empty_flag = 1;
		irq_need_baffle = 1;
		pcdev->dma_running = 0;
		goto out;
	} else 
		irq_buf_empty_flag = 0;
	
	vb = list_entry(next, struct videobuf_buffer, queue);

	/* setup the DMA address for transferring */
	yaddr_chl1_next = videobuf_to_dma_contig(vb);
	yaddr_chl2_next = yaddr_chl1_next + size * 3 / 2;
	if (isp_is_capturing_odd(&pcdev->isp))
		isp_set_even_frame(&pcdev->isp, yaddr_chl1_next, yaddr_chl2_next);
	else 
		isp_set_odd_frame(&pcdev->isp, yaddr_chl1_next, yaddr_chl2_next);
out:	
	isp_update_regtable(&pcdev->isp, 0);
	return 0;
}
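In continuous mode the driver ping-pongs between two frame slots: while the ISP captures into one slot, the IRQ path programs the DMA address of the next queued buffer into the other. A condensed, illustrative sketch of that update, reusing the helper names that appear above (the wrapper function itself is hypothetical):

static void ak_program_next_frame(struct ak_camera_dev *pcdev,
				  struct videobuf_buffer *next_vb)
{
	unsigned long y_addr  = videobuf_to_dma_contig(next_vb);
	unsigned long y2_addr = y_addr + next_vb->width * next_vb->height * 3 / 2;

	if (isp_is_capturing_odd(&pcdev->isp))
		isp_set_even_frame(&pcdev->isp, y_addr, y2_addr); /* odd slot busy */
	else
		isp_set_odd_frame(&pcdev->isp, y_addr, y2_addr);  /* even slot busy */
}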
Example No. 12
/**
 * @brief: set sensor clock
 * 
 * @author: caolianming
 * @date: 2014-01-06
 * @param [in] cis_sclk: sensor working clock, in MHz
 */
static void set_sensor_cis_sclk(unsigned int cis_sclk)
{
	unsigned long regval;
	unsigned int cis_sclk_div;
	
	unsigned int peri_pll = ak_get_peri_pll_clk()/1000000;
	
	cis_sclk_div = peri_pll/cis_sclk - 1;

	regval = REG32(CLOCK_PERI_PLL_CTRL2);
	regval &= ~(0x3f << 10);
	regval |= (cis_sclk_div << 10);
	REG32(CLOCK_PERI_PLL_CTRL2) = (1 << 19)|regval;

	isp_dbg("%s() cis_sclk=%dMHz peri_pll=%dMHz cis_sclk_div=%d\n", 
			__func__, cis_sclk, peri_pll, cis_sclk_div);
}
Example No. 13
static void isp_video_capture_buffer_queue(struct vb2_buffer *vb)
{
	struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
	struct fimc_isp *isp = vb2_get_drv_priv(vb->vb2_queue);
	struct fimc_is_video *video = &isp->video_capture;
	struct fimc_is *is = fimc_isp_to_is(isp);
	struct isp_video_buf *ivb = to_isp_video_buf(vbuf);
	unsigned long flags;
	unsigned int i;

	if (test_bit(ST_ISP_VID_CAP_BUF_PREP, &isp->state)) {
		spin_lock_irqsave(&is->slock, flags);
		video->buf_mask |= BIT(ivb->index);
		spin_unlock_irqrestore(&is->slock, flags);
	} else {
		unsigned int num_planes = video->format->memplanes;

		ivb->index = video->buf_count;
		video->buffers[ivb->index] = ivb;

		for (i = 0; i < num_planes; i++) {
			int buf_index = ivb->index * num_planes + i;

			ivb->dma_addr[i] = vb2_dma_contig_plane_dma_addr(vb, i);
			is->is_p_region->shared[32 + buf_index] =
							ivb->dma_addr[i];

			isp_dbg(2, &video->ve.vdev,
				"dma_buf %d (%d/%d/%d) addr: %pad\n",
				buf_index, ivb->index, i, vb->index,
				&ivb->dma_addr[i]);
		}

		if (++video->buf_count < video->reqbufs_count)
			return;

		video->buf_mask = (1UL << video->buf_count) - 1;
		set_bit(ST_ISP_VID_CAP_BUF_PREP, &isp->state);
	}

	if (!test_bit(ST_ISP_VID_CAP_STREAMING, &isp->state))
		isp_video_capture_start_streaming(vb->vb2_queue, 0);
}
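The buf_mask set above is a bitmask with one bit per prepared buffer, later handed to the hardware as dma_out_mask; a minimal sketch of that computation (the helper name is hypothetical):

static inline unsigned long isp_buf_mask(unsigned int buf_count)
{
	/* bits 0 .. buf_count-1 set, e.g. 4 buffers -> 0xf */
	return (1UL << buf_count) - 1;
}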
Example No. 14
static int isp_video_capture_start_streaming(struct vb2_queue *q,
						unsigned int count)
{
	struct fimc_isp *isp = vb2_get_drv_priv(q);
	struct fimc_is *is = fimc_isp_to_is(isp);
	struct param_dma_output *dma = __get_isp_dma2(is);
	struct fimc_is_video *video = &isp->video_capture;
	int ret;

	if (!test_bit(ST_ISP_VID_CAP_BUF_PREP, &isp->state) ||
	    test_bit(ST_ISP_VID_CAP_STREAMING, &isp->state))
		return 0;


	dma->cmd = DMA_OUTPUT_COMMAND_ENABLE;
	dma->notify_dma_done = DMA_OUTPUT_NOTIFY_DMA_DONE_ENABLE;
	dma->buffer_address = is->is_dma_p_region +
				DMA2_OUTPUT_ADDR_ARRAY_OFFS;
	dma->buffer_number = video->reqbufs_count;
	dma->dma_out_mask = video->buf_mask;

	isp_dbg(2, &video->ve.vdev,
		"buf_count: %d, planes: %d, dma addr table: %#x\n",
		video->buf_count, video->format->memplanes,
		dma->buffer_address);

	fimc_is_mem_barrier();

	fimc_is_set_param_bit(is, PARAM_ISP_DMA2_OUTPUT);
	__fimc_is_hw_update_param(is, PARAM_ISP_DMA2_OUTPUT);

	ret = fimc_is_itf_s_param(is, false);
	if (ret < 0)
		return ret;

	ret = fimc_pipeline_call(&video->ve, set_stream, 1);
	if (ret < 0)
		return ret;

	set_bit(ST_ISP_VID_CAP_STREAMING, &isp->state);
	return ret;
}
Example No. 15
/**
 * @brief: read back a volatile camera control (image adjustment, color effect, ...).
 * 
 * @author: caolianming
 * @date: 2014-01-06
 * @param [in] *ctrl: V4L2 image effect control information structure
 */
static int ak_camera_g_volatile_ctrl(struct v4l2_ctrl *ctrl)
{
//	struct soc_camera_device *icd = ctrl_to_icd(ctrl);
//	struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
//	struct soc_camera_host *ici = to_soc_camera_host(icd->parent);
//	struct ak_camera_dev *pcdev = ici->priv;
	
	isp_dbg("entry %s, ctrl->id=%x\n", __func__, ctrl->id);

	switch (ctrl->id) {
	case V4L2_CID_BRIGHTNESS:
		isp_dbg("%s(): V4L2_CID_BRIGHTNESS\n", __func__);
		break;
	case V4L2_CID_CONTRAST:
		isp_dbg("%s(): V4L2_CID_CONTRAST\n", __func__);
		break;
	case V4L2_CID_SATURATION:
		isp_dbg("%s(): V4L2_CID_SATURATION\n", __func__);
		break;
	case V4L2_CID_SHARPNESS:
		isp_dbg("%s(): V4L2_CID_SHARPNESS\n", __func__);
		break;
	case V4L2_CID_HUE:
		break;
	case V4L2_CID_HUE_AUTO:
		break;
	case V4L2_CID_COLORFX:
		isp_dbg("%s(): V4L2_CID_COLORFX\n", __func__);
		break;
	case V4L2_CID_DO_WHITE_BALANCE:
		isp_dbg("%s(): V4L2_CID_DO_WHITE_BALANCE\n", __func__);
		break;
	case V4L2_CID_AUTO_WHITE_BALANCE:
		isp_dbg("%s(): V4L2_CID_AUTO_WHITE_BALANCE\n", __func__);
		break;
	}
	
	return 0;
}
Example No. 16
static int fimc_isp_subdev_get_fmt(struct v4l2_subdev *sd,
				   struct v4l2_subdev_fh *fh,
				   struct v4l2_subdev_format *fmt)
{
	struct fimc_isp *isp = v4l2_get_subdevdata(sd);
	struct v4l2_mbus_framefmt *mf = &fmt->format;

	if (fmt->which == V4L2_SUBDEV_FORMAT_TRY) {
		*mf = *v4l2_subdev_get_try_format(fh, fmt->pad);
		return 0;
	}

	mf->colorspace = V4L2_COLORSPACE_SRGB;

	mutex_lock(&isp->subdev_lock);

	if (fmt->pad == FIMC_ISP_SD_PAD_SINK) {
		/* ISP OTF input image format */
		*mf = isp->sink_fmt;
	} else {
		/* ISP OTF output image format */
		*mf = isp->src_fmt;

		if (fmt->pad == FIMC_ISP_SD_PAD_SRC_FIFO) {
			mf->colorspace = V4L2_COLORSPACE_JPEG;
			mf->code = V4L2_MBUS_FMT_YUV10_1X30;
		}
	}

	mutex_unlock(&isp->subdev_lock);

	isp_dbg(1, sd, "%s: pad%d: fmt: 0x%x, %dx%d\n", __func__,
		fmt->pad, mf->code, mf->width, mf->height);

	return 0;
}
Example No. 17
/**
 * @brief: set the image format.
 * 
 * @author: caolianming
 * @date: 2014-01-06
 * @param [in] *icd: soc_camera_device information structure;
 * the ak camera driver depends on the soc camera layer.
 * @param [in] *f: image format
 */
static int ak_camera_set_fmt(struct soc_camera_device *icd,
			      struct v4l2_format *f)
{
	struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
	struct soc_camera_host *ici = to_soc_camera_host(icd->parent);
	struct ak_camera_dev *pcdev = ici->priv;		
	const struct soc_camera_format_xlate *xlate;
	struct v4l2_pix_format *pix = &f->fmt.pix;
	struct v4l2_mbus_framefmt mf;
	struct v4l2_cropcap cropcap;
	int ret, buswidth;

	isp_dbg("entry %s\n", __func__);

	xlate = soc_camera_xlate_by_fourcc(icd, pix->pixelformat);
	if (!xlate) {
		dev_warn(icd->parent, "Format %x not found\n",
			 pix->pixelformat);
		return -EINVAL;
	}

	//if (YUV_OUT)
		
	buswidth = xlate->host_fmt->bits_per_sample;
	if (buswidth > 10) {
		dev_warn(icd->parent,
			 "bits-per-sample %d for format %x unsupported\n",
			 buswidth, pix->pixelformat);
		return -EINVAL;
	}

	mf.width	= pix->width;
	mf.height	= pix->height;
	mf.field	= pix->field;
	mf.colorspace	= pix->colorspace;
	mf.code		= xlate->code;
	icd->current_fmt = xlate;

	v4l2_subdev_call(sd, video, cropcap, &cropcap);
	if ((mf.width > cropcap.bounds.width) 
		|| (mf.height > cropcap.bounds.height)) {
		/* D1 is obtained as follows:
		 * if cropcap.bounds is 720p, output by scaling down;
		 * if cropcap.bounds is VGA, output by scaling up.
		 */
		mf.width = cropcap.defrect.width;
		mf.height = cropcap.defrect.height;
	} else if ((mf.width < cropcap.defrect.width)
		|| (mf.height < cropcap.defrect.height)) {
		/*
		 * Assumption: the smallest sensor output and the defrect size are VGA;
		 * D1 and QVGA are obtained from VGA by scaling down.
		 */
		mf.width = cropcap.defrect.width;
		mf.height = cropcap.defrect.height;
	} 

	isp_dbg("%s. mf.width = %d, mf.height = %d\n", 
			__func__, mf.width, mf.height);

	ret = v4l2_subdev_call(sd, video, s_mbus_fmt, &mf);
	if (ret < 0) 
		return ret;
	
	if (mf.code != xlate->code) 
		return -EINVAL;

	/* Record the VGA size, used to decide whether to enable ISP channel 2 for RGB input. */
	pcdev->isp.fmt_def_width = mf.width;
	pcdev->isp.fmt_def_height = mf.height;
	
	/*
	 * @fmt_width and @fmt_height are the input image size from the sensor.
	 * @chl1_width and @chl1_height are the output image size for the user.
	 */
	pcdev->isp.fmt_width = pix->width;
	pcdev->isp.fmt_height = pix->height;
	pcdev->isp.chl1_width = pix->width;
	pcdev->isp.chl1_height= pix->height;
	
	isp_set_cutter_window(&pcdev->isp, 0, 0, mf.width, mf.height);
	isp_set_channel1_scale(&pcdev->isp, pix->width, pix->height);
	
	isp_dbg("%s: chl1_width=%d, chl1_height=%d\n", __func__, 
					pcdev->isp.chl1_width, pcdev->isp.chl1_height);
	
	return ret;
}
Example No. 18
/**
 * @brief: prepare a buffer before it is queued; (re)initializes it if needed.
 * 
 * @author: caolianming
 * @date: 2014-01-06
 * @param [in] *vq: V4L2  buffer queue information structure
 * @param [in] *vb: V4L2  buffer information structure
 * @param [in] field: V4L2_FIELD_ANY 
 */
static int ak_videobuf_prepare(struct videobuf_queue *vq,
			struct videobuf_buffer *vb, enum v4l2_field field)
{
	struct soc_camera_device *icd = vq->priv_data;
	struct ak_buffer *buf = container_of(vb, struct ak_buffer, vb);
	int ret;
	int bytes_per_line = soc_mbus_bytes_per_line(icd->user_width,
						icd->current_fmt->host_fmt);

	isp_dbg("%s (vb=0x%p) buf[%d] vb->baddr=0x%08lx vb->bsize=%d bytes_per_line=%d\n",
			__func__, vb, vb->i, vb->baddr, vb->bsize, bytes_per_line);

	bytes_per_line = icd->user_width * 3 / 2;

	if (bytes_per_line < 0)
		return bytes_per_line;

	/* Added list head initialization on alloc */
	WARN_ON(!list_empty(&vb->queue));

#if 0
//#ifdef ISP_DEBUG	
	/*	 
	* This can be useful if you want to see if we actually fill	 
	* the buffer with something	 
	*/
	memset((void *)vb->baddr, 0xaa, vb->bsize);
#endif

	BUG_ON(NULL == icd->current_fmt);
	
	/* I think, in buf_prepare you only have to protect global data,
	 * the actual buffer is yours */
	buf->inwork = 1;
	
	if (buf->code	!= icd->current_fmt->code ||
	    vb->width	!= icd->user_width ||
	    vb->height	!= icd->user_height ||
	    vb->field	!= field) {
		buf->code	= icd->current_fmt->code;
		vb->width	= icd->user_width;
		vb->height	= icd->user_height;
		vb->field	= field;
		vb->state	= VIDEOBUF_NEEDS_INIT;
	}

	vb->size = bytes_per_line * vb->height;
	if (0 != vb->baddr && vb->bsize < vb->size) {
		ret = -EINVAL;
		goto out;
	}

	if (vb->state == VIDEOBUF_NEEDS_INIT) {
		ret = videobuf_iolock(vq, vb, NULL);
		if (ret)
			goto fail;

		vb->state = VIDEOBUF_PREPARED;
	}

	buf->inwork = 0;

	return 0;
	
fail:
	free_buffer(vq, buf);
out:
	buf->inwork = 0;
	return ret;
}
Example No. 19
/**
 * @brief: queue a buffer; starts data capture if the hardware is idle.
 * 
 * @author: caolianming
 * @date: 2014-01-06
 * @param [in] *vq: V4L2  buffer queue information structure
 * @param [in] *vb: V4L2  buffer information structure
 */
static void ak_videobuf_queue(struct videobuf_queue *vq, 
								struct videobuf_buffer *vb)
{
	struct soc_camera_device *icd = vq->priv_data;
	struct soc_camera_host *ici = to_soc_camera_host(icd->parent);
	struct ak_camera_dev *pcdev = ici->priv;
	struct ak_buffer *buf = container_of(vb, struct ak_buffer, vb);
	u32 yaddr_chl1, yaddr_chl2, size;
	static int ch2_sync = 0;
	
	isp_dbg("%s (vb=0x%p) buf[%d] baddr = 0x%08lx, bsize = %d\n",
			__func__,  vb, vb->i, vb->baddr, vb->bsize);

	list_add_tail(&vb->queue, &pcdev->capture);

	vb->state = VIDEOBUF_ACTIVE;
	size = vb->width * vb->height;
	yaddr_chl1 = videobuf_to_dma_contig(vb); /* for mater channel */
	yaddr_chl2 = yaddr_chl1 + size * 3 / 2; /* for secondary channel */
	
	switch(pcdev->isp.cur_mode) {
	case ISP_YUV_OUT:
	case ISP_YUV_BYPASS:
	case ISP_RGB_OUT:	
		/* for single mode */
		if (!pcdev->active) {
			pcdev->active = buf;
			pcdev->dma_running = 1;	
			
			isp_set_even_frame(&pcdev->isp, yaddr_chl1, yaddr_chl2);
			isp_apply_mode(&pcdev->isp);
			isp_start_capturing(&pcdev->isp);
			
			isp_dbg("queue[single]: vbuf[%d] start run.\n", vb->i);
		}
		break;

	case ISP_YUV_VIDEO_OUT:
	case ISP_YUV_VIDEO_BYPASS:
	case ISP_RGB_VIDEO_OUT:
		/* for continuous mode */
		if (!pcdev->active) {
			pcdev->active = buf;
			pcdev->dma_running = 0;
			ch2_sync = 1;
			
			isp_set_even_frame(&pcdev->isp, yaddr_chl1, yaddr_chl2);
			isp_dbg("queue[continue]: vbuf1[%d]\n", vb->i);
			return; 		
		}

		if (!pcdev->dma_running) {
			pcdev->dma_running = 1;

			if (ch2_sync) {
				ch2_sync = 0;
				irq_buf_empty_flag = 0;
				
				isp_set_odd_frame(&pcdev->isp, yaddr_chl1, yaddr_chl2);
				isp_apply_mode(&pcdev->isp);
				isp_start_capturing(&pcdev->isp);
				
				isp_dbg("queue[continue]: vbuf2[%d] start.\n", vb->i);
				return;
			}

			// make sure the Y address can be updated immediately
			if (isp_is_capturing_odd(&pcdev->isp)) 
				isp_set_even_frame(&pcdev->isp, yaddr_chl1, yaddr_chl2);
			else 
				isp_set_odd_frame(&pcdev->isp, yaddr_chl1, yaddr_chl2);
			
		}
		break;
	default:
		printk("The working mode of ISP hasn't been initialized.\n");
	}
	if (pcdev->pdata->rf_led.pin > 0)
		rfled_timer(pcdev);
}
Example No. 20
/**
 * @brief: called from the IRQ handler when a frame completes;
 * timestamps the buffer and wakes up the wait queue.
 * 
 * @author: caolianming
 * @date: 2014-01-06
 * @param [in] *pcdev: ak camera driver structure, includes the soc camera structure
 * @param [in] *vb: V4L2  buffer information structure
 * @param [in] *buf: ak camera drivers structure, include struct videobuf_buffer 
 */
static void ak_camera_wakeup(struct ak_camera_dev *pcdev,
			      struct videobuf_buffer *vb,
			      struct ak_buffer *buf)
{
	struct captureSync *adctime;
	struct timeval		cam_tv;
	unsigned long		adc_stamp;
	unsigned long		useconds;
	unsigned long long 	actuallyBytes = 0;
	
	isp_dbg("%s (vb=0x%p) buf[%d], baddr = 0x%08lx, bsize = %d\n",
			__func__,  vb, vb->i, vb->baddr, vb->bsize);

	do_gettimeofday(&cam_tv);
	
	adctime = getRecordSyncSamples();

	/* figure out the timestamp of frame */
	//adc_stamp = (unsigned long)(( adctime->adcCapture_bytes * 1000) / ( adctime->rate * ( adctime->frame_bits / 8 ) ) );
	actuallyBytes = adctime->adcCapture_bytes * (unsigned long long)1000;
	if ( actuallyBytes != 0 ) {
		do_div( actuallyBytes, ( adctime->rate * ( adctime->frame_bits / 8 ) ) );
		adc_stamp = actuallyBytes;
	} else { /* no audio is currently being captured */
		adc_stamp = 1000; /* arbitrary value */
	}

	if (cam_tv.tv_sec > adctime->tv.tv_sec) {
		useconds = cam_tv.tv_usec + 1000000 - adctime->tv.tv_usec;
	} else {
		useconds = cam_tv.tv_usec - adctime->tv.tv_usec;
	}	

	vb->ts.tv_usec = (adc_stamp % 1000) * 1000 + useconds;

	if(vb->ts.tv_usec >= 1000000) {
		vb->ts.tv_sec = adc_stamp / 1000 + 1;
		vb->ts.tv_usec = vb->ts.tv_usec % 1000000;
	} else {
		vb->ts.tv_sec = adc_stamp / 1000;
	}	

	/* We don't have much to do if the capturing list is empty */
	if (list_empty(&pcdev->capture)) {
		pcdev->active = NULL;
		pcdev->dma_running = 0;
		
		//REG32(&pcdev->isp.base + ISP_PERI_PARA) &= ~(1 << 29);
		//REG32(&pcdev->isp.base + ISP_PERI_PARA) |= (1 << 28);
		
		isp_stop_capturing(&pcdev->isp);
		printk("isp-irq: vbuf queue is empty.\n");
		return;
	}

	if (!irq_buf_empty_flag) {
		list_del_init(&vb->queue);
		vb->state = VIDEOBUF_DONE;
		vb->field_count++;
		/* hand the current frame back to the videobuf layer */
		wake_up(&vb->done);
		
		isp_dbg("wakeup (vb=0x%p) buf[%d], baddr = 0x%08lx, bsize = %d\n",
				vb, vb->i, vb->baddr, vb->bsize);
	}

	if (pcdev->isp.cur_mode_class == ISP_RGB_CLASS) {
		/* read the next frame's AWB parameters 33 ms later */
		schedule_delayed_work(&pcdev->isp.awb_work, msecs_to_jiffies(33));
		schedule_delayed_work(&pcdev->isp.ae_work, 0);
	}

	pcdev->active = list_entry(pcdev->capture.next,
					   struct ak_buffer, vb.queue);

	ak_camera_setup_dma(pcdev);
}
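The frame timestamp above is derived from the audio capture position: the number of PCM bytes captured so far is converted to milliseconds and then combined with the gettimeofday() delta. A minimal sketch of that conversion, mirroring the do_div() use in the code (the helper name adc_bytes_to_msec is hypothetical):

static unsigned long adc_bytes_to_msec(unsigned long long bytes,
				       unsigned int rate,
				       unsigned int frame_bits)
{
	unsigned long long msec = bytes * 1000ULL;

	/* bytes per second = samples per second * bytes per sample frame */
	do_div(msec, rate * (frame_bits / 8));

	return (unsigned long)msec;
}

For example, 32000 bytes captured at 8000 Hz with 16-bit samples give 32000 * 1000 / 16000 = 2000 ms.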