/* videobuf operations */ static int unicam_videobuf_setup(struct vb2_queue *vq, const struct v4l2_format *fmt, unsigned int *count, unsigned int *numplanes, unsigned int sizes[], void *alloc_ctxs[]) { struct soc_camera_device *icd = soc_camera_from_vb2q(vq); struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent); struct unicam_camera_dev *unicam_dev = (struct unicam_camera_dev *)ici->priv; int bytes_per_line = soc_mbus_bytes_per_line(icd->user_width, icd-> current_fmt->host_fmt); pr_debug("-enter"); if (bytes_per_line < 0) return bytes_per_line; *numplanes = 1; unicam_dev->sequence = 0; sizes[0] = bytes_per_line * icd->user_height; #if defined(CONFIG_VIDEOBUF2_DMA_CONTIG) alloc_ctxs[0] = unicam_dev->alloc_ctx; #endif if (!*count) *count = 2; pr_debug("no_of_buf=%d size=%u", *count, sizes[0]); pr_debug("-exit"); return 0; }
static void mx3_videobuf_release(struct vb2_buffer *vb) { struct soc_camera_device *icd = soc_camera_from_vb2q(vb->vb2_queue); struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent); struct mx3_camera_dev *mx3_cam = ici->priv; struct mx3_camera_buffer *buf = to_mx3_vb(vb); struct dma_async_tx_descriptor *txd = buf->txd; unsigned long flags; dev_dbg(icd->dev.parent, "Release%s DMA 0x%08x, queue %sempty\n", mx3_cam->active == buf ? " active" : "", sg_dma_address(&buf->sg), list_empty(&buf->queue) ? "" : "not "); spin_lock_irqsave(&mx3_cam->lock, flags); if (mx3_cam->active == buf) mx3_cam->active = NULL; /* Doesn't hurt also if the list is empty */ list_del_init(&buf->queue); buf->state = CSI_BUF_NEEDS_INIT; if (txd) { buf->txd = NULL; if (mx3_cam->idmac_channel[0]) async_tx_ack(txd); } spin_unlock_irqrestore(&mx3_cam->lock, flags); }
static void unicam_videobuf_queue(struct vb2_buffer *vb) { struct soc_camera_device *icd = soc_camera_from_vb2q(vb->vb2_queue); struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent); struct unicam_camera_dev *unicam_dev = ici->priv; struct unicam_camera_buffer *buf = to_unicam_camera_vb(vb); unsigned long flags; dprintk("-enter"); dprintk("vb=0x%p vbuf=0x%p pbuf=0x%p size=%lu", vb, vb2_plane_vaddr(vb, 0), (void *)vb2_dma_contig_plane_dma_addr(vb, 0), vb2_get_plane_payload(vb, 0)); spin_lock_irqsave(&unicam_dev->lock, flags); list_add_tail(&buf->queue, &unicam_dev->capture); if (!unicam_dev->active) { unicam_dev->active = vb; /* use this buffer to trigger capture */ /* Configure HW only is streamon has been done * else only update active, HW would be configured * by streamon */ if(unicam_dev->streaming){ unicam_camera_update_buf(unicam_dev); if (unicam_dev->if_params.if_mode == V4L2_SUBDEV_SENSOR_MODE_SERIAL_CSI2) unicam_camera_capture(unicam_dev); } } spin_unlock_irqrestore(&unicam_dev->lock, flags); dprintk("-exit"); }
static int unicam_videobuf_prepare(struct vb2_buffer *vb) { struct soc_camera_device *icd = soc_camera_from_vb2q(vb->vb2_queue); int bytes_per_line = soc_mbus_bytes_per_line(icd->user_width, icd-> current_fmt->host_fmt); unsigned long size; pr_debug("-enter"); if (bytes_per_line < 0) return bytes_per_line; pr_debug("vb=0x%p buf=0x%p, size=%lu", vb, (void *)vb2_plane_dma_addr(vb, 0), vb2_get_plane_payload(vb, 0)); size = icd->user_height * bytes_per_line; if (vb2_plane_size(vb, 0) < size) { dev_err(icd->dev.parent, "Buffer too small (%lu < %lu)\n", vb2_plane_size(vb, 0), size); return -ENOBUFS; } vb2_set_plane_payload(vb, 0, size); pr_debug("-exit"); return 0; }
/* * Calculate the __buffer__ (not data) size and number of buffers. */ static int mx3_videobuf_setup(struct vb2_queue *vq, unsigned int *count, unsigned int *num_planes, unsigned long sizes[], void *alloc_ctxs[]) { struct soc_camera_device *icd = soc_camera_from_vb2q(vq); struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent); struct mx3_camera_dev *mx3_cam = ici->priv; int bytes_per_line = soc_mbus_bytes_per_line(icd->user_width, icd->current_fmt->host_fmt); if (bytes_per_line < 0) return bytes_per_line; if (!mx3_cam->idmac_channel[0]) return -EINVAL; *num_planes = 1; mx3_cam->sequence = 0; sizes[0] = bytes_per_line * icd->user_height; alloc_ctxs[0] = mx3_cam->alloc_ctx; if (!*count) *count = 32; if (sizes[0] * *count > MAX_VIDEO_MEM * 1024 * 1024) *count = MAX_VIDEO_MEM * 1024 * 1024 / sizes[0]; return 0; }
/* * .queue_setup() is called to check whether the driver can accept the requested * number of buffers and to fill in plane sizes for the current frame format if * required */ static int rcar_vin_videobuf_setup(struct vb2_queue *vq, const struct v4l2_format *fmt, unsigned int *count, unsigned int *num_planes, unsigned int sizes[], void *alloc_ctxs[]) { struct soc_camera_device *icd = soc_camera_from_vb2q(vq); struct soc_camera_host *ici = to_soc_camera_host(icd->parent); struct rcar_vin_priv *priv = ici->priv; if (fmt) { const struct soc_camera_format_xlate *xlate; unsigned int bytes_per_line; int ret; xlate = soc_camera_xlate_by_fourcc(icd, fmt->fmt.pix.pixelformat); if (!xlate) return -EINVAL; ret = soc_mbus_bytes_per_line(fmt->fmt.pix.width, xlate->host_fmt); if (ret < 0) return ret; bytes_per_line = max_t(u32, fmt->fmt.pix.bytesperline, ret); ret = soc_mbus_image_size(xlate->host_fmt, bytes_per_line, fmt->fmt.pix.height); if (ret < 0) return ret; sizes[0] = max_t(u32, fmt->fmt.pix.sizeimage, ret); } else { /* Called from VIDIOC_REQBUFS or in compatibility mode */ sizes[0] = icd->sizeimage; } alloc_ctxs[0] = priv->alloc_ctx; if (!vq->num_buffers) priv->sequence = 0; if (!*count) *count = 2; priv->vb_count = *count; *num_planes = 1; /* Number of hardware slots */ if (is_continuous_transfer(priv)) priv->nr_hw_slots = MAX_BUFFER_NUM; else priv->nr_hw_slots = 1; dev_dbg(icd->parent, "count=%d, size=%u\n", *count, sizes[0]); return 0; }
/*
 * .stop_streaming(): drop every buffer still waiting on the capture
 * list, under the device lock.
 */
static void rcar_vin_stop_streaming(struct vb2_queue *vq)
{
	struct soc_camera_device *icd = soc_camera_from_vb2q(vq);
	struct soc_camera_host *ici = to_soc_camera_host(icd->parent);
	struct rcar_vin_priv *priv = ici->priv;
	struct list_head *entry, *next;

	spin_lock_irq(&priv->lock);
	list_for_each_safe(entry, next, &priv->capture)
		list_del_init(entry);
	spin_unlock_irq(&priv->lock);
}
/*
 * .buf_cleanup(): release a buffer. If the VIN hardware still owns it
 * (it sits in a hardware slot), capturing is first brought to a full
 * stop and then every hardware-owned buffer is returned with an error
 * state; otherwise the buffer is simply unlinked from the capture list.
 */
static void rcar_vin_videobuf_release(struct vb2_buffer *vb)
{
	struct soc_camera_device *icd = soc_camera_from_vb2q(vb->vb2_queue);
	struct soc_camera_host *ici = to_soc_camera_host(icd->parent);
	struct rcar_vin_priv *priv = ici->priv;
	unsigned int i;
	int buf_in_use = 0;

	spin_lock_irq(&priv->lock);

	/* Is the buffer in use by the VIN hardware? */
	for (i = 0; i < MAX_BUFFER_NUM; i++) {
		if (priv->queue_buf[i] == vb) {
			buf_in_use = 1;
			break;
		}
	}

	if (buf_in_use) {
		while (priv->state != STOPPED) {
			/* issue stop if running */
			if (priv->state == RUNNING)
				rcar_vin_request_capture_stop(priv);

			/* wait until capturing has been stopped */
			if (priv->state == STOPPING) {
				priv->request_to_stop = true;
				/*
				 * The lock must be dropped while sleeping on
				 * the completion; state is re-checked by the
				 * loop after re-acquiring it.
				 */
				spin_unlock_irq(&priv->lock);
				wait_for_completion(&priv->capture_stop);
				spin_lock_irq(&priv->lock);
			}
		}
		/*
		 * Capturing has now stopped. The buffer we have been asked
		 * to release could be any of the current buffers in use, so
		 * release all buffers that are in use by HW
		 */
		for (i = 0; i < MAX_BUFFER_NUM; i++) {
			if (priv->queue_buf[i]) {
				vb2_buffer_done(priv->queue_buf[i],
						VB2_BUF_STATE_ERROR);
				priv->queue_buf[i] = NULL;
			}
		}
	} else {
		/* Not owned by HW: just unlink it from the capture list */
		list_del_init(to_buf_list(vb));
	}

	spin_unlock_irq(&priv->lock);
}
static int unicam_videobuf_init(struct vb2_buffer *vb) { struct soc_camera_device *icd = soc_camera_from_vb2q(vb->vb2_queue); struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent); struct unicam_camera_dev *unicam_dev = ici->priv; struct unicam_camera_buffer *buf = to_unicam_camera_vb(vb); unsigned long flags; spin_lock_irqsave(&unicam_dev->lock, flags); INIT_LIST_HEAD(&buf->queue); spin_unlock_irqrestore(&unicam_dev->lock, flags); buf->magic = UNICAM_BUF_MAGIC; return 0; }
/*
 * .buf_queue(): append the buffer to the capture list; when no buffer is
 * active, program it into the hardware and — if already streaming —
 * start the CSI0 receiver and configure the interrupt sources.
 */
static void unicam_videobuf_queue(struct vb2_buffer *vb)
{
	struct soc_camera_device *icd = soc_camera_from_vb2q(vb->vb2_queue);
	struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent);
	struct unicam_camera_dev *unicam_dev = ici->priv;
	struct unicam_camera_buffer *buf = to_unicam_camera_vb(vb);
	unsigned long flags;
	struct int_desc idesc;

	pr_debug("-enter");
	pr_debug("vb=0x%p pbuf=0x%p size=%lu", vb,
		 (void *)vb2_plane_dma_addr(vb, 0),
		 vb2_get_plane_payload(vb, 0));
	/* pr_info("Q 0x%x\n", vb2_plane_paddr(vb, 0)); */

	spin_lock_irqsave(&unicam_dev->lock, flags);
	list_add_tail(&buf->queue, &unicam_dev->capture);

	/*
	 * In capture mode, once a frame has already been captured, newly
	 * queued buffers stay on the list without touching the hardware.
	 */
	if (unicam_dev->cap_mode && unicam_dev->cap_done) {
		pr_info("Cap mode and already captured\n");
		spin_unlock_irqrestore(&unicam_dev->lock, flags);
		return;
	}
	if ((!unicam_dev->active)) {
		unicam_dev->active = vb;
		unicam_camera_update_buf(unicam_dev);
		if (atomic_read(&unicam_dev->streaming)) {
			mm_csi0_start_rx();
			/* set data capture */
			if (unicam_dev->if_params.if_mode ==
			    V4L2_SUBDEV_SENSOR_MODE_SERIAL_CSI2) {
				/*
				 * CSI2 path — presumably fsi/fei enable
				 * frame-start/frame-end interrupts and die
				 * the data interrupt; TODO confirm against
				 * the int_desc definition.
				 */
				idesc.fsi = 1;
				idesc.fei = 1;
				idesc.lci = 0;
				idesc.die = 1;
				idesc.dataline = 2;
				mm_csi0_config_int(&idesc, IMAGE_BUFFER);
				mm_csi0_config_int(&idesc, DATA_BUFFER);
				unicam_camera_capture(unicam_dev);
			} else {
				/*
				 * Non-CSI2 path: lci is set to the frame
				 * height — presumably a line-count
				 * interrupt on the last line; TODO confirm.
				 */
				idesc.fsi = 0;
				idesc.fei = 0;
				idesc.lci = unicam_dev->icd->user_height;
				idesc.die = 0;
				idesc.dataline = 0;
				mm_csi0_config_int(&idesc, IMAGE_BUFFER);
			}
		}
	}
	spin_unlock_irqrestore(&unicam_dev->lock, flags);
	pr_debug("-exit");
}
/*
 * .start_streaming(): delegate to the internal start helper unless the
 * device is already streaming, in which case only an error is logged.
 */
int unicam_videobuf_start_streaming(struct vb2_queue *q, unsigned int count)
{
	struct soc_camera_device *icd = soc_camera_from_vb2q(q);
	struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent);
	struct unicam_camera_dev *unicam_dev = ici->priv;

	/*atomic_set(&unicam_dev->retry_count, 0);*/
	if (atomic_read(&unicam_dev->streaming)) {
		pr_err("unicam_videobuf_start_streaming: already started\n");
		return 0;
	}

	return unicam_videobuf_start_streaming_int(unicam_dev, count);
}
/*
 * .buf_queue(): validate the plane payload, place the buffer on the
 * capture list / into a hardware slot, and start capturing once the
 * hardware has enough buffers. On any failure the buffer is completed
 * with VB2_BUF_STATE_ERROR.
 */
static void rcar_vin_videobuf_queue(struct vb2_buffer *vb)
{
	struct soc_camera_device *icd = soc_camera_from_vb2q(vb->vb2_queue);
	struct soc_camera_host *ici = to_soc_camera_host(icd->parent);
	struct rcar_vin_priv *priv = ici->priv;
	unsigned long size;

	size = icd->sizeimage;

	if (vb2_plane_size(vb, 0) < size) {
		dev_err(icd->parent, "Buffer #%d too small (%lu < %lu)\n",
			vb->v4l2_buf.index, vb2_plane_size(vb, 0), size);
		goto error;
	}

	vb2_set_plane_payload(vb, 0, size);

	dev_dbg(icd->parent, "%s (vb=0x%p) 0x%p %lu\n", __func__,
		vb, vb2_plane_vaddr(vb, 0), vb2_get_plane_payload(vb, 0));

	spin_lock_irq(&priv->lock);

	list_add_tail(to_buf_list(vb), &priv->capture);
	rcar_vin_fill_hw_slot(priv);

	/* If we weren't running, and have enough buffers, start capturing! */
	if (priv->state != RUNNING && rcar_vin_hw_ready(priv)) {
		if (rcar_vin_setup(priv)) {
			/* Submit error: undo the list insertion above */
			list_del_init(to_buf_list(vb));
			spin_unlock_irq(&priv->lock);
			goto error;
		}
		priv->request_to_stop = false;
		init_completion(&priv->capture_stop);
		priv->state = RUNNING;
		rcar_vin_capture(priv);
	}

	spin_unlock_irq(&priv->lock);

	return;

error:
	vb2_buffer_done(vb, VB2_BUF_STATE_ERROR);
}
static int mx3_videobuf_prepare(struct vb2_buffer *vb) { struct soc_camera_device *icd = soc_camera_from_vb2q(vb->vb2_queue); struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent); struct mx3_camera_dev *mx3_cam = ici->priv; struct idmac_channel *ichan = mx3_cam->idmac_channel[0]; struct scatterlist *sg; struct mx3_camera_buffer *buf; size_t new_size; int bytes_per_line = soc_mbus_bytes_per_line(icd->user_width, icd->current_fmt->host_fmt); if (bytes_per_line < 0) return bytes_per_line; buf = to_mx3_vb(vb); sg = &buf->sg; new_size = bytes_per_line * icd->user_height; if (vb2_plane_size(vb, 0) < new_size) { dev_err(icd->dev.parent, "Buffer too small (%lu < %zu)\n", vb2_plane_size(vb, 0), new_size); return -ENOBUFS; } if (buf->state == CSI_BUF_NEEDS_INIT) { sg_dma_address(sg) = vb2_dma_contig_plane_paddr(vb, 0); sg_dma_len(sg) = new_size; buf->txd = ichan->dma_chan.device->device_prep_slave_sg( &ichan->dma_chan, sg, 1, DMA_FROM_DEVICE, DMA_PREP_INTERRUPT); if (!buf->txd) return -EIO; buf->txd->callback_param = buf->txd; buf->txd->callback = mx3_cam_dma_done; buf->state = CSI_BUF_PREPARED; } vb2_set_plane_payload(vb, 0, new_size); return 0; }
/*
 * .stop_streaming(): stop the hardware via the internal stop helper;
 * log an error if streaming was never started.
 *
 * Returns 0, or the error code from unicam_videobuf_stop_streaming_int().
 */
int unicam_videobuf_stop_streaming(struct vb2_queue *q)
{
	struct soc_camera_device *icd = soc_camera_from_vb2q(q);
	struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent);
	struct unicam_camera_dev *unicam_dev = ici->priv;
	int ret = 0;

	if (atomic_read(&unicam_dev->streaming))
		ret = unicam_videobuf_stop_streaming_int(unicam_dev);
	else
		/* Message used to say "start_streaming" — copy-paste fix */
		pr_err("unicam_videobuf_stop_streaming: already stopped\n");
	/*atomic_set(&unicam_dev->retry_count, 0);
	del_timer_sync(&(unicam_dev->unicam_timer));
	flush_work_sync(&unicam_dev->retry_work);*/
	return ret;
}
static void unicam_videobuf_release(struct vb2_buffer *vb) { struct soc_camera_device *icd = soc_camera_from_vb2q(vb->vb2_queue); struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent); struct unicam_camera_dev *unicam_dev = ici->priv; struct unicam_camera_buffer *buf = to_unicam_camera_vb(vb); unsigned long flags; pr_debug("-enter"); pr_debug("vb=0x%p pbuf=0x%p size=%lu", vb, (void *)vb2_plane_dma_addr(vb, 0), vb2_get_plane_payload(vb, 0)); spin_lock_irqsave(&unicam_dev->lock, flags); if (buf->magic == UNICAM_BUF_MAGIC) list_del_init(&buf->queue); spin_unlock_irqrestore(&unicam_dev->lock, flags); pr_debug("-exit"); }
static void mx3_videobuf_queue(struct vb2_buffer *vb) { struct soc_camera_device *icd = soc_camera_from_vb2q(vb->vb2_queue); struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent); struct mx3_camera_dev *mx3_cam = ici->priv; struct mx3_camera_buffer *buf = to_mx3_vb(vb); struct dma_async_tx_descriptor *txd = buf->txd; struct idmac_channel *ichan = to_idmac_chan(txd->chan); struct idmac_video_param *video = &ichan->params.video; dma_cookie_t cookie; u32 fourcc = icd->current_fmt->host_fmt->fourcc; unsigned long flags; /* This is the configuration of one sg-element */ video->out_pixel_fmt = fourcc_to_ipu_pix(fourcc); if (video->out_pixel_fmt == IPU_PIX_FMT_GENERIC) { /* * If the IPU DMA channel is configured to transport * generic 8-bit data, we have to set up correctly the * geometry parameters upon the current pixel format. * So, since the DMA horizontal parameters are expressed * in bytes not pixels, convert these in the right unit. */ int bytes_per_line = soc_mbus_bytes_per_line(icd->user_width, icd->current_fmt->host_fmt); BUG_ON(bytes_per_line <= 0); video->out_width = bytes_per_line; video->out_height = icd->user_height; video->out_stride = bytes_per_line; } else { /* * For IPU known formats the pixel unit will be managed * successfully by the IPU code */ video->out_width = icd->user_width; video->out_height = icd->user_height; video->out_stride = icd->user_width; } #ifdef DEBUG /* helps to see what DMA actually has written */ if (vb2_plane_vaddr(vb, 0)) memset(vb2_plane_vaddr(vb, 0), 0xaa, vb2_get_plane_payload(vb, 0)); #endif spin_lock_irqsave(&mx3_cam->lock, flags); list_add_tail(&buf->queue, &mx3_cam->capture); if (!mx3_cam->active) mx3_cam->active = buf; spin_unlock_irq(&mx3_cam->lock); cookie = txd->tx_submit(txd); dev_dbg(icd->dev.parent, "Submitted cookie %d DMA 0x%08x\n", cookie, sg_dma_address(&buf->sg)); if (cookie >= 0) return; spin_lock_irq(&mx3_cam->lock); /* Submit error */ list_del_init(&buf->queue); if (mx3_cam->active == buf) 
mx3_cam->active = NULL; spin_unlock_irqrestore(&mx3_cam->lock, flags); vb2_buffer_done(vb, VB2_BUF_STATE_ERROR); }
/*
 * .stop_streaming(): quiesce the Unicam receiver — wait for any active
 * capture to finish, disable frame interrupts, stop and close the CSL
 * camera session — then ask the sensor subdevice to stop streaming.
 *
 * Returns 0 on success, -1 (or a down_timeout() error code) on failure.
 */
int unicam_videobuf_stop_streaming(struct vb2_queue *q)
{
	struct soc_camera_device *icd = soc_camera_from_vb2q(q);
	struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent);
	struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
	struct unicam_camera_dev *unicam_dev = ici->priv;
	CSL_CAM_FRAME_st_t cslCamFrame;
	int ret = 0;
	unsigned long flags;

	/* Dump receiver registers for debugging before tearing down */
	if (down_interruptible(&unicam_dev->stop_processing_sem) == 0) {
		if (unicam_dev->streaming)
			csl_cam_register_display(unicam_dev->cslCamHandle);
	} else
		dev_err(unicam_dev->dev,
			"Unable to dump regs because stop_processing_sem acquire failed\n");

	/* grab the lock */
	spin_lock_irqsave(&unicam_dev->lock, flags);
	dprintk("-enter");
	dprintk("disabling csi");
	iprintk("stopping stream");

	if (!unicam_dev->streaming) {
		dev_err(unicam_dev->dev, "stream already turned off\n");
		goto out;
	}

	/*
	 * stop streaming before grabing spin lock
	 * since this function can sleep.
	 */
	if (unicam_dev->active) {
		/*
		 * Signal the interrupt path to stop and wait (up to 500 ms)
		 * for it to release stop_sem; the spinlock must be dropped
		 * while sleeping.
		 */
		unicam_dev->stopping = true;
		spin_unlock_irqrestore(&unicam_dev->lock, flags);
		ret = down_timeout(&unicam_dev->stop_sem,
				   msecs_to_jiffies(500));
		if (ret == -ETIME)
			pr_err("Unicam: semaphore timed out waiting to STOP\n");
	} else {
		spin_unlock_irqrestore(&unicam_dev->lock, flags);
	}

	usleep_range(50, 60); /*TODO: Need to double-check with ASIC team*/
	spin_lock_irqsave(&unicam_dev->lock, flags);

	/* disable frame interrupts */
	cslCamFrame.int_enable = CSL_CAM_INT_DISABLE;
	cslCamFrame.int_line_count = 0;
	cslCamFrame.capture_mode = UNICAM_CAPTURE_MODE;
	cslCamFrame.capture_size = 0;
	/*
	 * NOTE(review): the CSL teardown below runs with the spinlock held
	 * and IRQs off — confirm these csl_cam_* calls cannot sleep.
	 */
	if (csl_cam_set_frame_control(unicam_dev->cslCamHandle,
				      &cslCamFrame)) {
		dev_err(unicam_dev->dev,
			"csl_cam_set_frame_control(): FAILED\n");
		ret = -1;
	}

	/* disable receiver */
	if (csl_cam_rx_stop(unicam_dev->cslCamHandle)) {
		dev_err(unicam_dev->dev, "csl_cam_rx_stop(): FAILED\n");
		ret = -1;
	}
	/* NOTE(review): message says "cals_cam_exit" but this reports
	 * csl_cam_close() */
	if (csl_cam_close(unicam_dev->cslCamHandle)) {
		dev_err(unicam_dev->dev, "cals_cam_exit(): FAILED\n");
		ret = -1;
	}
	if (csl_cam_exit()) {
		dev_err(unicam_dev->dev, "csl_cam_exit(): FAILED\n");
		ret = -1;
	}

	unicam_dev->active = NULL;
	unicam_dev->streaming = 0;
out:
	dprintk("-exit");
	spin_unlock_irqrestore(&unicam_dev->lock, flags);
	up(&unicam_dev->stop_processing_sem);

	/* Stopping stream after stopping unicam */
	ret = v4l2_subdev_call(sd, video, s_stream, 0);
	if (ret < 0 && ret != -ENOIOCTLCMD) {
		dev_err(unicam_dev->dev, "failed to stop sensor streaming\n");
		ret = -1;
	}
	return ret;
}
/*
 * .start_streaming(): bring up the CSL camera session and program the
 * Unicam receiver from the sensor's interface parameters — interface
 * type (CSI2/CCP2), AFE port, lane count and timing, pipeline, image and
 * data IDs — then start the receiver and, for a buffer queued ahead of
 * streamon, arm the first capture.
 *
 * Returns 0 on success, a negative errno or -1 on failure.
 */
int unicam_videobuf_start_streaming(struct vb2_queue *q, unsigned int count)
{
	struct soc_camera_device *icd = soc_camera_from_vb2q(q);
	struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent);
	struct unicam_camera_dev *unicam_dev = ici->priv;
	struct v4l2_subdev_sensor_interface_parms if_params;
	struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
	int ret;
	int thumb;
	unsigned long flags;
	CSL_CAM_INTF_CFG_st_t csl_cam_intf_cfg_st;
	CSL_CAM_LANE_CONTROL_st_t cslCamLaneCtrl_st;
	CSL_CAM_PIPELINE_st_t cslCamPipeline;
	CSL_CAM_IMAGE_ID_st_t cslCamImageCtrl;
	CSL_CAM_DATA_st_t cslCamDataCtrl;
	CSL_CAM_FRAME_st_t cslCamFrame;

	dprintk("-enter");
	iprintk("enabling csi");

	spin_lock_irqsave(&unicam_dev->lock, flags);
	unicam_dev->stopping = false;
	spin_unlock_irqrestore(&unicam_dev->lock, flags);

	if (csl_cam_init()) {
		dev_err(unicam_dev->dev, "error initializing csl camera\n");
		return -1;
	}

	/* Start the sensor; undo csl_cam_init() on failure */
	ret = v4l2_subdev_call(sd, video, s_stream, 1);
	if (ret < 0 && ret != -ENOIOCTLCMD) {
		dev_err(unicam_dev->dev, "error on s_stream(%d)\n", ret);
		spin_lock_irqsave(&unicam_dev->lock, flags);
		unicam_dev->active = NULL;
		unicam_dev->stopping = true;
		spin_unlock_irqrestore(&unicam_dev->lock, flags);
		if (csl_cam_exit())
			dev_err(unicam_dev->dev, "csl_cam_exit(): FAILED\n");
		return ret;
	}

	/* get the sensor interface information */
	ret = v4l2_subdev_call(sd, sensor, g_interface_parms, &if_params);
	if (ret < 0) {
		dev_err(unicam_dev->dev, "error on g_inferface_params(%d)\n",
			ret);
		/*
		 * NOTE(review): this path leaves csl_cam_init() done and the
		 * sensor streaming — confirm whether unwinding is needed.
		 */
		return ret;
	}
	unicam_dev->if_params = if_params;

	/* set camera interface parameters */
	memset(&csl_cam_intf_cfg_st, 0, sizeof(CSL_CAM_INTF_CFG_st_t));

	/* we only support serial and csi2 sensor */
	if ((unicam_dev->if_params.if_type == V4L2_SUBDEV_SENSOR_SERIAL) &&
	    (unicam_dev->if_params.if_mode ==
	     V4L2_SUBDEV_SENSOR_MODE_SERIAL_CSI2)) {
		csl_cam_intf_cfg_st.intf = CSL_CAM_INTF_CSI;
	} else if ((unicam_dev->if_params.if_type ==
		    V4L2_SUBDEV_SENSOR_SERIAL) &&
		   (unicam_dev->if_params.if_mode ==
		    V4L2_SUBDEV_SENSOR_MODE_SERIAL_CSI1)) {
		csl_cam_intf_cfg_st.intf = CSL_CAM_INTF_CCP;
	} else {
		dev_err(unicam_dev->dev,
			"CSI2 iface only supported,requested iface %d mode=%d\n",
			unicam_dev->if_params.if_type,
			unicam_dev->if_params.if_mode);
		return -EINVAL;
	}

	/* AFE port selection follows the serial channel number */
	if (unicam_dev->if_params.parms.serial.channel == 0)
		csl_cam_intf_cfg_st.afe_port = CSL_CAM_PORT_AFE_0;
	else if (unicam_dev->if_params.parms.serial.channel == 1)
		csl_cam_intf_cfg_st.afe_port = CSL_CAM_PORT_AFE_1;
	else {
		dev_err(unicam_dev->dev,
			"receiver only supports two channels, request channel=%d\n",
			unicam_dev->if_params.parms.serial.channel);
		return -EINVAL;
	}

	csl_cam_intf_cfg_st.frame_time_out = 1000;

	/* open camera interface */
	csl_cam_intf_cfg_st.p_cpi_intf_st = NULL;

	/* CSI2 supports 1 or 2 data lanes; CCP2 uses data+clock mode */
	if ((unicam_dev->if_params.if_type == V4L2_SUBDEV_SENSOR_SERIAL) &&
	    (unicam_dev->if_params.if_mode ==
	     V4L2_SUBDEV_SENSOR_MODE_SERIAL_CSI2)) {
		if (unicam_dev->if_params.parms.serial.lanes == 1)
			csl_cam_intf_cfg_st.input_mode =
				CSL_CAM_INPUT_SINGLE_LANE;
		else if (unicam_dev->if_params.parms.serial.lanes == 2)
			csl_cam_intf_cfg_st.input_mode =
				CSL_CAM_INPUT_DUAL_LANE;
		else {
			dev_err(unicam_dev->dev,
				"receiver only supports max 2 lanes, requested lanes(%d)\n",
				unicam_dev->if_params.parms.serial.lanes);
			return -EINVAL;
		}
	} else {
		csl_cam_intf_cfg_st.input_mode = CSL_CAM_INPUT_MODE_DATA_CLOCK;
	}

	if (csl_cam_open(&csl_cam_intf_cfg_st, &unicam_dev->cslCamHandle)) {
		dev_err(unicam_dev->dev, "%s: csl_cam_open(): ERROR\n",
			__func__);
		return -1;
	}

	/* set data lane timing */
	cslCamLaneCtrl_st.lane_select = CSL_CAM_DATA_LANE_0;
	cslCamLaneCtrl_st.lane_control = CSL_CAM_LANE_HS_TERM_TIME;
	cslCamLaneCtrl_st.param =
		unicam_dev->if_params.parms.serial.hs_term_time;
	dprintk("hs_term_time is set to = %d\n", cslCamLaneCtrl_st.param);
	if (csl_cam_set_lane_control
	    (unicam_dev->cslCamHandle, &cslCamLaneCtrl_st)) {
		dev_err(unicam_dev->dev,
			"csl_cam_set_lane_control(): FAILED\n");
		return -1;
	}

	/* Optional HS settle time: 0 means "leave at hardware default" */
	if (unicam_dev->if_params.parms.serial.hs_settle_time != 0) {
		cslCamLaneCtrl_st.lane_select = CSL_CAM_DATA_LANE_0;
		cslCamLaneCtrl_st.lane_control = CSL_CAM_LANE_HS_SETTLE_TIME;
		cslCamLaneCtrl_st.param =
			unicam_dev->if_params.parms.serial.hs_settle_time;
		dprintk("hs_settle_time is set to = %d\n",
			cslCamLaneCtrl_st.param);
		if (csl_cam_set_lane_control
		    (unicam_dev->cslCamHandle, &cslCamLaneCtrl_st)) {
			dev_err(unicam_dev->dev,
				"csl_cam_set_lane_control(): FAILED\n");
			return -1;
		}
	}

	/* pipelince decode */
	cslCamPipeline.decode = CSL_CAM_DEC_NONE;
	cslCamPipeline.unpack = CSL_CAM_PIXEL_NONE;
	cslCamPipeline.pack = CSL_CAM_PIXEL_NONE;
	cslCamPipeline.dec_adv_predictor = FALSE;
	cslCamPipeline.encode = CSL_CAM_ENC_NONE;
	cslCamPipeline.enc_adv_predictor = FALSE;
	cslCamPipeline.encode_blk_size = 0x0000;

	/* set pipeline */
	if (csl_cam_set_pipeline_control
	    (unicam_dev->cslCamHandle, &cslCamPipeline)) {
		dev_err(unicam_dev->dev,
			"csl_cam_set_pipeline_control(): FAILE\n");
		return -1;
	}

	/* set image identifier (CSI mode only) */
	memset(&cslCamImageCtrl, 0, sizeof(CSL_CAM_IMAGE_ID_st_t));
	/* if thumbnail is supported we expect
	 * thumbnail to be in image ptr format of thumbnails is yuv422
	 * format is checked in try format.
	 * in case where thumbnail is not supported we get jpeg
	 * image in data pointer. so we set the id as 0 */
	thumb = 0;
	ret = v4l2_subdev_call(sd, core, ioctl, VIDIOC_THUMB_SUPPORTED,
			       (void *)&thumb);
	if (ret < 0)
		dev_warn(unicam_dev->dev,
			 "sensor returns error(%d) for VIDIOC_THUMB_SUPPORTED\n",
			 ret);
	if ((icd->current_fmt->code == V4L2_MBUS_FMT_JPEG_1X8) &&
	    (thumb == 0))
		cslCamImageCtrl.image_data_id0 = 0x0; /* thumbnail not supported */
	else
		cslCamImageCtrl.image_data_id0 = 0x1E;

	if (unicam_dev->if_params.if_mode ==
	    V4L2_SUBDEV_SENSOR_MODE_SERIAL_CSI1)
		cslCamImageCtrl.image_data_id0 = 0x0; /* CCP2 channel ID 0 */

	if (csl_cam_set_image_type_control
	    (unicam_dev->cslCamHandle, &cslCamImageCtrl)) {
		dev_err(unicam_dev->dev,
			"csl_cam_set_image_type_control(): FAILED\n");
		return -1;
	}

	/* set data capture */
	cslCamDataCtrl.int_enable = (CSL_CAM_INTERRUPT_t)
		(CSL_CAM_INT_DISABLE);
	if (unicam_dev->if_params.if_mode ==
	    V4L2_SUBDEV_SENSOR_MODE_SERIAL_CSI2) {
		cslCamDataCtrl.line_count = 2;
		cslCamDataCtrl.fsp_decode_enable = FALSE;
	} else {
		cslCamDataCtrl.line_count = 0;
		cslCamDataCtrl.fsp_decode_enable = TRUE;
	}
	cslCamDataCtrl.data_id = 0x00;
	cslCamDataCtrl.data_size = CSL_CAM_PIXEL_8BIT;

	if (csl_cam_set_data_type_control
	    (unicam_dev->cslCamHandle, &cslCamDataCtrl)) {
		dev_err(unicam_dev->dev,
			"csl_cam_set_data_type_control(): FAILED\n");
		return -1;
	}

	/* start receiver */
	if (csl_cam_rx_start(unicam_dev->cslCamHandle)) {
		dev_err(unicam_dev->dev, "csl_cam_rx_start(): FAILED\n");
		return -1;
	}

	/* Enabling sensor after enabling unicam */
	/* ret = v4l2_subdev_call(sd, video, s_stream, 1);
	if (ret < 0 && ret != -ENOIOCTLCMD) {
		dev_err(unicam_dev->dev, "error on s_stream(%d)\n", ret);
		return ret;
	} */

	/* CSI1/CCP2: arm a line-count interrupt at the frame height */
	if (unicam_dev->if_params.if_mode ==
	    V4L2_SUBDEV_SENSOR_MODE_SERIAL_CSI1) {
		cslCamFrame.int_enable = CSL_CAM_INT_LINE_COUNT;
		/* CSL_CAM_INT_FRAME_END | CSL_CAM_INT_FRAME_START;
		//CSL_CAM_INT_LINE_COUNT;
		cslCamFrame.int_line_count =
			(unicam_dev->icd->user_height - 1);*/
		cslCamFrame.int_line_count = (unicam_dev->icd->user_height);
		cslCamFrame.capture_mode =
			CSL_CAM_CAPTURE_MODE_NORMAL;
		/* CSL_CAM_CAPTURE_MODE_NORMAL */
		/*
		 * NOTE(review): cslCamFrame.capture_size is never assigned
		 * here (the struct is not zeroed) — confirm the CSL API
		 * ignores it for this call.
		 */
		if (csl_cam_set_frame_control(
			    unicam_dev->cslCamHandle, &cslCamFrame)) {
			dev_err(unicam_dev->dev,
				"csl_cam_set_frame_control(): FAILED\n");
			return -1;
		}
	}

	/* Configure HW if buffer is queued ahead of streamon */
	spin_lock_irqsave(&unicam_dev->lock, flags);
	if (unicam_dev->active) {
		unicam_camera_update_buf(unicam_dev);
		if (unicam_dev->if_params.if_mode ==
		    V4L2_SUBDEV_SENSOR_MODE_SERIAL_CSI2)
			unicam_camera_capture(unicam_dev);
	}
	unicam_dev->streaming = 1;
	spin_unlock_irqrestore(&unicam_dev->lock, flags);

	csl_cam_register_display(unicam_dev->cslCamHandle);

	dprintk("-exit");
	return 0;
}