static int mx2_camera_set_crop(struct soc_camera_device *icd, struct v4l2_crop *a) { struct v4l2_rect *rect = &a->c; struct v4l2_subdev *sd = soc_camera_to_subdev(icd); struct v4l2_mbus_framefmt mf; int ret; soc_camera_limit_side(&rect->left, &rect->width, 0, 2, 4096); soc_camera_limit_side(&rect->top, &rect->height, 0, 2, 4096); ret = v4l2_subdev_call(sd, video, s_crop, a); if (ret < 0) return ret; /* The capture device might have changed its output */ ret = v4l2_subdev_call(sd, video, g_mbus_fmt, &mf); if (ret < 0) return ret; dev_dbg(icd->parent, "Sensor cropped %dx%d\n", mf.width, mf.height); icd->user_width = mf.width; icd->user_height = mf.height; return ret; }
static int ak_camera_set_crop(struct soc_camera_device *icd, struct v4l2_crop *crop) { struct v4l2_subdev *sd = soc_camera_to_subdev(icd); struct soc_camera_host *ici = to_soc_camera_host(icd->parent); struct ak_camera_dev *pcdev = ici->priv; int ret, width, height; isp_dbg("entry %s\n", __func__); if (pcdev->dma_running) { /* make sure streaming is not started */ v4l2_err(&ici->v4l2_dev, "Cannot change crop when streaming is ON\n"); return -EBUSY; } width = crop->c.width - crop->c.left; height = crop->c.height - crop->c.top; if ((crop->c.top < 0 || crop->c.left < 0) ||(((width * 3) < 18) || (height * 3) < 18) ||((width > 1280) || (height > 720))) { v4l2_err(&ici->v4l2_dev, "doesn't support negative values for top & left\n"); return -EINVAL; } if ((ret = isp_set_crop(&pcdev->isp, crop->c)) < 0) ret = v4l2_subdev_call(sd, video, s_crop, crop); return ret; }
static int get_scales(struct soc_camera_device *icd, unsigned int *scale_h, unsigned int *scale_v) { struct sh_mobile_ceu_cam *cam = icd->host_priv; struct v4l2_subdev *sd = soc_camera_to_subdev(icd); struct v4l2_crop cam_crop; unsigned int width_in, height_in; int ret; cam_crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; ret = client_g_rect(sd, &cam_crop.c); if (ret < 0) return ret; ret = get_camera_scales(sd, &cam_crop.c, scale_h, scale_v); if (ret < 0) return ret; width_in = scale_up(cam->ceu_rect.width, *scale_h); height_in = scale_up(cam->ceu_rect.height, *scale_v); *scale_h = calc_generic_scale(width_in, icd->user_width); *scale_v = calc_generic_scale(height_in, icd->user_height); return 0; }
/*
 * Stop UNICAM streaming.  Waits (bounded, 500 ms) for an in-flight
 * capture to complete, disables the CSI receiver, releases the receiver
 * handle, and finally asks the sensor to stop its stream.
 *
 * Returns 0 on success, a negative value from down_timeout(), or -1 if
 * the sensor refused to stop streaming.
 */
static int unicam_videobuf_stop_streaming_int(struct unicam_camera_dev
					      *unicam_dev)
{
	struct soc_camera_device *icd = unicam_dev->icd;
	struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
	int ret = 0;
	unsigned long flags;
	struct rx_stat_list rx;

	/* grab the lock */
	spin_lock_irqsave(&unicam_dev->lock, flags);
	pr_debug("-enter");
	pr_debug("disabling csi");
	pr_debug("stopping stream");
	if (!atomic_read(&unicam_dev->streaming)) {
		pr_err("stream already turned off\n");
		goto out;
	}

	if (unicam_dev->active) {
		/*
		 * A buffer is still being filled: drop the lock and wait
		 * for stop_sem to be released (by the capture completion
		 * path — presumably the ISR; confirm), at most 500 ms.
		 */
		atomic_set(&unicam_dev->stopping, 1);
		spin_unlock_irqrestore(&unicam_dev->lock, flags);
		ret = down_timeout(&unicam_dev->stop_sem,
				   msecs_to_jiffies(500));
		atomic_set(&unicam_dev->stopping, 0);
		if (ret == -ETIME) {
			pr_err("Unicam: semaphore timed out waiting to STOP\n");
			unicam_reg_dump();
		}
	} else {
		spin_unlock_irqrestore(&unicam_dev->lock, flags);
	}
	usleep_range(50, 60);
	/*TODO: Need to double-check with ASIC team*/
	spin_lock_irqsave(&unicam_dev->lock, flags);
	unicam_stop();
	/* Restart rx stat */
	mm_csi0_get_rx_stat(&rx, 1);
	/* Don't bother what values were returned */
	mm_csi0_teardown();
	/* Reset all per-stream state under the lock */
	unicam_dev->active = NULL;
	atomic_set(&unicam_dev->streaming, 0);
	memset(&unicam_dev->crop, 0x00, sizeof(struct v4l2_crop));
	unicam_dev->cap_done = 0;
	unicam_dev->cap_mode = 0;
out:
	pr_debug("-exit");
	atomic_set(&unicam_dev->cam_triggered, 0);
	spin_unlock_irqrestore(&unicam_dev->lock, flags);
	/* stop sensor streaming after UNICAM is disabled */
	ret = v4l2_subdev_call(sd, video, s_stream, 0);
	if (ret < 0 && ret != -ENOIOCTLCMD) {
		pr_err("failed to stop sensor streaming\n");
		ret = -1;
	}
	return ret;
}
static int omap1_cam_get_formats(struct soc_camera_device *icd, unsigned int idx, struct soc_camera_format_xlate *xlate) { struct v4l2_subdev *sd = soc_camera_to_subdev(icd); struct device *dev = icd->dev.parent; int formats = 0, ret; enum v4l2_mbus_pixelcode code; const struct soc_mbus_pixelfmt *fmt; ret = v4l2_subdev_call(sd, video, enum_mbus_fmt, idx, &code); if (ret < 0) /* No more formats */ return 0; fmt = soc_mbus_get_fmtdesc(code); if (!fmt) { dev_warn(dev, "%s: unsupported format code #%d: %d\n", __func__, idx, code); return 0; } /* Check support for the requested bits-per-sample */ if (fmt->bits_per_sample != 8) return 0; switch (code) { case V4L2_MBUS_FMT_YUYV8_2X8: case V4L2_MBUS_FMT_YVYU8_2X8: case V4L2_MBUS_FMT_UYVY8_2X8: case V4L2_MBUS_FMT_VYUY8_2X8: case V4L2_MBUS_FMT_RGB555_2X8_PADHI_BE: case V4L2_MBUS_FMT_RGB555_2X8_PADHI_LE: case V4L2_MBUS_FMT_RGB565_2X8_BE: case V4L2_MBUS_FMT_RGB565_2X8_LE: formats++; if (xlate) { xlate->host_fmt = soc_mbus_find_fmtdesc(code, omap1_cam_formats, ARRAY_SIZE(omap1_cam_formats)); xlate->code = code; xlate++; dev_dbg(dev, "%s: providing format %s as byte swapped code #%d\n", __func__, xlate->host_fmt->name, code); } default: if (xlate) dev_dbg(dev, "%s: providing format %s in pass-through mode\n", __func__, fmt->name); } formats++; if (xlate) { xlate->host_fmt = fmt; xlate->code = code; xlate++; } return formats; }
/* Cropping is delegated entirely to the sensor subdevice. */
static int mx1_camera_set_crop(struct soc_camera_device *icd,
			       struct v4l2_crop *a)
{
	return v4l2_subdev_call(soc_camera_to_subdev(icd), video, s_crop, a);
}
/**
 * @brief: Called when /dev/videox is closed.  Shuts down the ISP and
 *         the sensor device.
 *
 * @author: caolianming
 * @date: 2014-01-06
 * @param [in] *icd: soc_camera_device information structure,
 *	akcamera depends on the soc driver.
 */
static void ak_camera_remove_device(struct soc_camera_device *icd)
{
	struct soc_camera_host *ici = to_soc_camera_host(icd->parent);
	struct ak_camera_dev *pcdev = ici->priv;
	struct v4l2_subdev *sd = soc_camera_to_subdev(icd);

	CAMDBG("entry %s\n", __func__);

	/* This host serves exactly one attached sensor at a time */
	BUG_ON(icd != pcdev->icd);

	/* Put the sensor back into its reset state */
	v4l2_subdev_call(sd, core, reset, 0);

	/* Quiesce the ISP before cutting its clocks */
	isp_clear_irq(&pcdev->isp);
	isp_stop_capturing(&pcdev->isp);

	/* disable sensor clk */
	clk_disable(pcdev->cis_sclk);

	/* disable the clock of isp module */
	clk_disable(pcdev->clk);

	//ak_soft_reset(AK_SRESET_CAMERA);

	dev_info(icd->parent, "AK Camera driver detached from camera %d\n",
		 icd->devnum);

	/* Drop references so the host can accept a new client */
	pcdev->active = NULL;
	pcdev->icd = NULL;

	CAMDBG("Leave %s\n", __func__);
}
static int mx1_camera_try_fmt(struct soc_camera_device *icd, struct v4l2_format *f) { struct v4l2_subdev *sd = soc_camera_to_subdev(icd); const struct soc_camera_format_xlate *xlate; struct v4l2_pix_format *pix = &f->fmt.pix; struct v4l2_mbus_framefmt mf; int ret; /* TODO: limit to mx1 hardware capabilities */ xlate = soc_camera_xlate_by_fourcc(icd, pix->pixelformat); if (!xlate) { dev_warn(icd->parent, "Format %x not found\n", pix->pixelformat); return -EINVAL; } mf.width = pix->width; mf.height = pix->height; mf.field = pix->field; mf.colorspace = pix->colorspace; mf.code = xlate->code; /* limit to sensor capabilities */ ret = v4l2_subdev_call(sd, video, try_mbus_fmt, &mf); if (ret < 0) return ret; pix->width = mf.width; pix->height = mf.height; pix->field = mf.field; pix->colorspace = mf.colorspace; return 0; }
/**
 * @brief: getting image format information
 *
 * @author: caolianming
 * @date: 2014-01-06
 * @param [in] *icd: soc_camera_device information structure,
 *	akcamera depends on the soc driver.
 * @param [in] idx: index of the sensor format being enumerated
 * @param [out] *xlate: translation entry to fill; may be NULL when the
 *	caller only wants the count
 *
 * Returns the number of formats added (0 or 1).
 */
static int ak_camera_get_formats(struct soc_camera_device *icd, unsigned int idx,
				 struct soc_camera_format_xlate *xlate)
{
	struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
	struct device *dev = icd->parent;
	struct soc_camera_host *ici = to_soc_camera_host(dev);
	struct ak_camera_dev *pcdev = ici->priv;
	int ret, formats = 0;
	enum v4l2_mbus_pixelcode code;
	const struct soc_mbus_pixelfmt *fmt;

	CAMDBG("entry %s\n", __func__);

	ret = v4l2_subdev_call(sd, video, enum_mbus_fmt, idx, &code);
	if (ret < 0)
		/* No more formats */
		return 0;

	/*
	 * @Note: ISP only support yuv420 output and jpeg out.
	 * FIXME1: We miss jpeg here.
	 * FIXME2: the output squence of YUV is actually UYVY.
	 *
	 * The host therefore always advertises YUYV8_2X8, regardless of
	 * what the sensor reported in 'code'.
	 */
	fmt = soc_mbus_get_fmtdesc(V4L2_MBUS_FMT_YUYV8_2X8);
	if (!fmt) {
		dev_warn(dev, "unsupported format code #%u: %d\n", idx, code);
		return 0;
	}

	CAMDBG("get format %s code=%d from sensor\n", fmt->name, code);

	/* Generic pass-through */
	formats++;
	if (xlate) {
		xlate->host_fmt = fmt;
		xlate->code = code;
		xlate++;

		/*
		 * @decide the default working mode of isp
		 * @prefer RGB mode
		 *
		 * NOTE(review): assumes media-bus codes below Y8_1X8 are
		 * the RGB/Bayer-side codes — confirm against the
		 * v4l2_mbus_pixelcode enum ordering in use.
		 */
		if (code < V4L2_MBUS_FMT_Y8_1X8) {
			pcdev->def_mode = ISP_RGB_VIDEO_OUT;
			//pcdev->def_mode = ISP_RGB_OUT;
		}
		if ((pcdev->def_mode != ISP_RGB_VIDEO_OUT)
			&& (pcdev->def_mode != ISP_RGB_OUT)) {
			pcdev->def_mode = ISP_YUV_VIDEO_BYPASS;
			//pcdev->def_mode = ISP_YUV_BYPASS;
		}

		pcdev->isp.cur_mode = pcdev->def_mode;
		update_cur_mode_class(&pcdev->isp);

		dev_dbg(dev, "Providing format %s in pass-through mode\n",
			fmt->name);
	}

	return formats;
}
static int unicam_camera_set_fmt(struct soc_camera_device *icd, struct v4l2_format *f) { struct device *dev = icd->dev.parent; struct v4l2_subdev *sd = soc_camera_to_subdev(icd); struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent); struct unicam_camera_dev *unicam_dev = ici->priv; const struct soc_camera_format_xlate *xlate = NULL; struct v4l2_pix_format *pix = &f->fmt.pix; struct v4l2_mbus_framefmt mf; int ret; u32 skip_frames = 0; dprintk("-enter"); xlate = soc_camera_xlate_by_fourcc(icd, pix->pixelformat); if (!xlate) { dev_warn(dev, "Format %x not found\n", pix->pixelformat); return -EINVAL; } mf.width = pix->width; mf.height = pix->height; mf.field = pix->field; mf.colorspace = pix->colorspace; mf.code = xlate->code; ret = v4l2_subdev_call(sd, video, s_mbus_fmt, &mf); if (mf.code != xlate->code) return -EINVAL; if (ret < 0) { dev_warn(dev, "Failed to configure for format %x\n", pix->pixelformat); return ret; } /*TODO limit here any maximum size */ ret = v4l2_subdev_call(sd, sensor, g_skip_frames, &skip_frames); if (ret < 0) { dev_warn(dev, "sensor driver doesn't implement g_skip_frames operation\n"); dev_warn(dev, "assuming zero skip frames\n"); skip_frames = 0; ret = 0; } unicam_dev->skip_frames = skip_frames; pix->width = mf.width; pix->height = mf.height; pix->field = mf.field; pix->colorspace = mf.colorspace; icd->current_fmt = xlate; iprintk("format set to %c%c%c%c res=%dx%d success=%d", pixfmtstr(pix->pixelformat), pix->width, pix->height, ret); dprintk("-exit"); return ret; }
/*
 * Cache the requested crop rectangle; windowing is actually programmed
 * when streaming (re)starts.  The #if 0 block below is an unfinished
 * attempt at live re-cropping and is compiled out.
 */
static int unicam_camera_set_crop(struct soc_camera_device *icd,
				  struct v4l2_crop *crop)
{
	struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent);
	struct unicam_camera_dev *unicam_dev = ici->priv;

	if (crop == NULL)
		return -EINVAL;

	unicam_dev->crop = *crop;
	return 0;
#if 0
	struct v4l2_subdev *sd = soc_camera_to_subdev(icd);

	pr_info("Configuring crop to %d %d\n", crop->c.width, crop->c.height);
	pr_info("Configuring top left to %d %d\n", crop->c.top, crop->c.left);

	v4l2_subdev_call(sd, video, s_stream, 0);
	spin_lock_irqsave(&unicam_dev->lock, flags);
	unicam_dev->crop = *crop;
	if (unicam_dev->streaming) {
		pr_info("Stopping stream\n");
		unicam_stop();
	}

	/* Configure new crop parameters */
	mm_csi0_set_windowing_vertical(unicam_dev->crop.c.top,
			(unicam_dev->crop.c.top + unicam_dev->crop.c.height));
	mm_csi0_cfg_pipeline_unpack(PIX_UNPACK_NONE);
	mm_csi0_cfg_pipeline_dpcm_dec(DPCM_DEC_NONE);
	mm_csi0_set_windowing_horizontal(unicam_dev->crop.c.left,
			(unicam_dev->crop.c.left + unicam_dev->crop.c.width));
	mm_csi0_cfg_pipeline_dpcm_enc(DPCM_ENC_NONE);
	mm_csi0_cfg_pipeline_pack(PIX_PACK_NONE);
	mm_csi0_start_rx();

	/* Re-configure buffer parameters */
	unicam_camera_update_buf(unicam_dev);

	/* set data capture */
	if (unicam_dev->if_params.if_mode ==
	    V4L2_SUBDEV_SENSOR_MODE_SERIAL_CSI2) {
		idesc.fsi = 1;
		idesc.fei = 1;
		idesc.lci = 0;
		idesc.die = 1;
		idesc.dataline = 2;
		mm_csi0_config_int(&idesc, IMAGE_BUFFER);
		mm_csi0_config_int(&idesc, DATA_BUFFER);
		unicam_camera_capture(unicam_dev);
	} else {
		idesc.fsi = 0;
		idesc.fei = 0;
		idesc.lci = unicam_dev->icd->user_height;
		idesc.die = 0;
		idesc.dataline = 0;
		mm_csi0_config_int(&idesc, IMAGE_BUFFER);
	}
	spin_unlock_irqrestore(&unicam_dev->lock, flags);
	v4l2_subdev_call(sd, video, s_stream, 1);
	return 0;
#endif
}
static int mx3_camera_try_fmt(struct soc_camera_device *icd, struct v4l2_format *f) { struct v4l2_subdev *sd = soc_camera_to_subdev(icd); const struct soc_camera_format_xlate *xlate; struct v4l2_pix_format *pix = &f->fmt.pix; struct v4l2_mbus_framefmt mf; __u32 pixfmt = pix->pixelformat; int ret; xlate = soc_camera_xlate_by_fourcc(icd, pixfmt); if (pixfmt && !xlate) { dev_warn(icd->dev.parent, "Format %x not found\n", pixfmt); return -EINVAL; } /* limit to MX3 hardware capabilities */ if (pix->height > 4096) pix->height = 4096; if (pix->width > 4096) pix->width = 4096; pix->bytesperline = soc_mbus_bytes_per_line(pix->width, xlate->host_fmt); if (pix->bytesperline < 0) return pix->bytesperline; pix->sizeimage = pix->height * pix->bytesperline; /* limit to sensor capabilities */ mf.width = pix->width; mf.height = pix->height; mf.field = pix->field; mf.colorspace = pix->colorspace; mf.code = xlate->code; ret = v4l2_subdev_call(sd, video, try_mbus_fmt, &mf); if (ret < 0) return ret; pix->width = mf.width; pix->height = mf.height; pix->colorspace = mf.colorspace; switch (mf.field) { case V4L2_FIELD_ANY: pix->field = V4L2_FIELD_NONE; break; case V4L2_FIELD_NONE: break; default: dev_err(icd->dev.parent, "Field type %d unsupported.\n", mf.field); ret = -EINVAL; } return ret; }
/* Suspend hook: currently a no-op, only resolves the driver private data. */
static int gc0329_suspend(struct soc_camera_device *icd, pm_message_t state)
{
	struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
	struct gc0329_priv *priv = container_of(sd, struct gc0329_priv, subdev);

	/* Nothing to save yet */
	(void)priv;

	return 0;
}
static int ak_camera_get_crop(struct soc_camera_device *icd, struct v4l2_crop *crop) { struct v4l2_subdev *sd = soc_camera_to_subdev(icd); //struct soc_camera_host *ici = to_soc_camera_host(icd->parent); //struct ak_camera_dev *pcdev = ici->priv; isp_dbg("entry %s\n", __func__); return v4l2_subdev_call(sd, video, g_crop, crop); }
/*
 * Ask the client (sensor) to set the requested format.  If the sensor
 * cannot deliver the exact geometry and the CEU can downscale, retry
 * with progressively larger sensor sizes (doubling each iteration),
 * capped by the sensor's crop bounds and the 2560x1920 CEU input limit,
 * so that the CEU can scale down to the requested size.
 */
static int client_s_fmt(struct soc_camera_device *icd, struct v4l2_format *f,
			bool ceu_can_scale)
{
	struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
	struct device *dev = icd->dev.parent;
	struct v4l2_pix_format *pix = &f->fmt.pix;
	unsigned int width = pix->width, height = pix->height, tmp_w, tmp_h;
	unsigned int max_width, max_height;
	struct v4l2_cropcap cap;
	int ret;

	cap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

	ret = v4l2_subdev_call(sd, video, cropcap, &cap);
	if (ret < 0)
		return ret;

	/* Never request more than the CEU can accept */
	max_width = min(cap.bounds.width, 2560);
	max_height = min(cap.bounds.height, 1920);

	ret = v4l2_subdev_call(sd, video, s_fmt, f);
	if (ret < 0)
		return ret;

	dev_geo(dev, "camera scaled to %ux%u\n", pix->width, pix->height);

	/* Exact match, or no way to improve: done */
	if ((width == pix->width && height == pix->height) || !ceu_can_scale)
		return 0;

	/* Grow the sensor window until it covers the requested size */
	tmp_w = pix->width;
	tmp_h = pix->height;
	while ((width > tmp_w || height > tmp_h) &&
	       tmp_w < max_width && tmp_h < max_height) {
		tmp_w = min(2 * tmp_w, max_width);
		tmp_h = min(2 * tmp_h, max_height);
		pix->width = tmp_w;
		pix->height = tmp_h;
		ret = v4l2_subdev_call(sd, video, s_fmt, f);
		dev_geo(dev, "Camera scaled to %ux%u\n",
			pix->width, pix->height);
		if (ret < 0) {
			dev_err(dev, "Client failed to set format: %d\n", ret);
			return ret;
		}
	}

	return 0;
}
static int ak_camera_cropcap(struct soc_camera_device *icd, struct v4l2_cropcap *crop) { struct v4l2_subdev *sd = soc_camera_to_subdev(icd); isp_dbg("enter %s\n", __func__); if (crop->type != V4L2_BUF_TYPE_VIDEO_CAPTURE) return -EINVAL; // isp support crop, need complete. return v4l2_subdev_call(sd, video, cropcap, crop); }
/*
 * Apply a crop rectangle on the sensor, then verify that the resulting
 * frame geometry still satisfies the DMA alignment constraints and fix
 * it up through set_format() when it does not.
 */
static int omap1_cam_set_crop(struct soc_camera_device *icd,
			      const struct v4l2_crop *crop)
{
	const struct v4l2_rect *rect = &crop->c;
	const struct soc_camera_format_xlate *xlate = icd->current_fmt;
	struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
	struct device *dev = icd->parent;
	struct soc_camera_host *ici = to_soc_camera_host(dev);
	struct omap1_cam_dev *pcdev = ici->priv;
	struct v4l2_subdev_format fmt = {
		.which = V4L2_SUBDEV_FORMAT_ACTIVE,
	};
	struct v4l2_mbus_framefmt *mf = &fmt.format;
	int ret;

	ret = subdev_call_with_sense(pcdev, dev, icd, sd, video, s_crop, crop);
	if (ret < 0) {
		dev_warn(dev, "%s: failed to crop to %ux%u@%u:%u\n", __func__,
			 rect->width, rect->height, rect->left, rect->top);
		return ret;
	}

	/* The sensor may have adjusted the frame size while cropping */
	ret = v4l2_subdev_call(sd, pad, get_fmt, NULL, &fmt);
	if (ret < 0) {
		dev_warn(dev, "%s: failed to fetch current format\n",
			 __func__);
		return ret;
	}

	/*
	 * dma_align(): negative = hard error; zero appears to mean the
	 * geometry had to be modified to fit DMA (presumably non-zero
	 * means already aligned — confirm against dma_align()'s
	 * definition).
	 */
	ret = dma_align(&mf->width, &mf->height, xlate->host_fmt,
			pcdev->vb_mode, false);
	if (ret < 0) {
		dev_err(dev, "%s: failed to align %ux%u %s with DMA\n",
			__func__, mf->width, mf->height,
			xlate->host_fmt->name);
		return ret;
	}

	if (!ret) {
		/* sensor returned geometry not DMA aligned, trying to fix */
		ret = set_format(pcdev, dev, icd, sd, &fmt, xlate);
		if (ret < 0) {
			dev_err(dev, "%s: failed to set format\n", __func__);
			return ret;
		}
	}

	/* Publish the final geometry to the soc-camera core */
	icd->user_width = mf->width;
	icd->user_height = mf->height;

	return 0;
}
/*
 * Compute the sensor-side sub-window that corresponds to the current
 * CEU rectangle, scaled up by the given camera scale factors.  On first
 * use the CEU rectangle is initialised to a centered window no larger
 * than 2560x1920 within the current camera frame.
 */
static int get_camera_subwin(struct soc_camera_device *icd,
			     struct v4l2_rect *cam_subrect,
			     unsigned int cam_hscale, unsigned int cam_vscale)
{
	struct sh_mobile_ceu_cam *cam = icd->host_priv;
	struct v4l2_rect *ceu_rect = &cam->ceu_rect;

	if (!ceu_rect->width) {
		/* First call: derive a default CEU window from the camera */
		struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
		struct device *dev = icd->dev.parent;
		struct v4l2_format f;
		struct v4l2_pix_format *pix = &f.fmt.pix;
		int ret;

		f.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

		ret = v4l2_subdev_call(sd, video, g_fmt, &f);
		if (ret < 0)
			return ret;

		dev_geo(dev, "camera fmt %ux%u\n", pix->width, pix->height);

		/* Clip to the 2560x1920 maximum, centering the window */
		if (pix->width > 2560) {
			ceu_rect->width = 2560;
			ceu_rect->left = (pix->width - 2560) / 2;
		} else {
			ceu_rect->width = pix->width;
			ceu_rect->left = 0;
		}

		if (pix->height > 1920) {
			ceu_rect->height = 1920;
			ceu_rect->top = (pix->height - 1920) / 2;
		} else {
			ceu_rect->height = pix->height;
			ceu_rect->top = 0;
		}

		dev_geo(dev, "initialised CEU rect %ux%u@%u:%u\n",
			ceu_rect->width, ceu_rect->height,
			ceu_rect->left, ceu_rect->top);
	}

	/* Project the CEU rectangle back into sensor coordinates */
	cam_subrect->width = scale_up(ceu_rect->width, cam_hscale);
	cam_subrect->left = scale_up(ceu_rect->left, cam_hscale);
	cam_subrect->height = scale_up(ceu_rect->height, cam_vscale);
	cam_subrect->top = scale_up(ceu_rect->top, cam_vscale);

	return 0;
}
/**
 * @brief: the isp standard control should be implemented here.
 * the function is image adjust, color effect...
 *
 * @author: caolianming
 * @date: 2014-01-06
 * @param [in] *ctrl: V4L2 image effect control information structure
 *
 * Controls the ISP can satisfy are handled locally and the function
 * returns early.  Anything else — including ISP failures — is forwarded
 * to the sensor subdevice; the sensor's return value is deliberately
 * ignored, so this handler always reports success.
 */
static int ak_camera_s_ctrl(struct v4l2_ctrl *ctrl)
{
	struct v4l2_control control;
	struct soc_camera_device *icd = ctrl_to_icd(ctrl);
	struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
	struct soc_camera_host *ici = to_soc_camera_host(icd->parent);
	struct ak_camera_dev *pcdev = ici->priv;

	isp_dbg("entry %s\n", __func__);

	switch (ctrl->id) {
	case V4L2_CID_BRIGHTNESS:
		if (isp_set_brightness(&pcdev->isp, ctrl) == 0)
			return 0;
		break;
	case V4L2_CID_CONTRAST:
		/* contrast is implemented via the ISP gamma control */
		if (isp_set_gamma(&pcdev->isp, ctrl) == 0)
			return 0;
		break;
	case V4L2_CID_SATURATION:
		if (isp_set_saturation(&pcdev->isp, ctrl) == 0)
			return 0;
		break;
	case V4L2_CID_SHARPNESS:
		if (isp_set_sharpness(&pcdev->isp, ctrl) == 0)
			return 0;
		break;
	case V4L2_CID_HUE:
		/* not handled by the ISP; falls through to the sensor */
		break;
	case V4L2_CID_HUE_AUTO:
		break;
	case V4L2_CID_COLORFX:
		if (isp_set_uspecial_effect(&pcdev->isp, ctrl, 0) == 0)
			return 0;
		break;
	case V4L2_CID_DO_WHITE_BALANCE:
		if (isp_manu_set_wb_param(&pcdev->isp, ctrl, 0) == 0)
			return 0;
		break;
	case V4L2_CID_AUTO_WHITE_BALANCE:
		if (isp_auto_set_wb_param(&pcdev->isp, ctrl) == 0)
			return 0;
		break;
	}

	/* Fall back to the sensor: convert new-style ctrl to legacy call */
	control.id = ctrl->id;
	control.value = ctrl->val;
	v4l2_subdev_call(sd, core, s_ctrl, &control);

	return 0;
}
static int omap1_cam_set_fmt(struct soc_camera_device *icd, struct v4l2_format *f) { struct v4l2_subdev *sd = soc_camera_to_subdev(icd); const struct soc_camera_format_xlate *xlate; struct device *dev = icd->parent; struct soc_camera_host *ici = to_soc_camera_host(dev); struct omap1_cam_dev *pcdev = ici->priv; struct v4l2_pix_format *pix = &f->fmt.pix; struct v4l2_subdev_format format = { .which = V4L2_SUBDEV_FORMAT_ACTIVE, }; struct v4l2_mbus_framefmt *mf = &format.format; int ret; xlate = soc_camera_xlate_by_fourcc(icd, pix->pixelformat); if (!xlate) { dev_warn(dev, "%s: format %#x not found\n", __func__, pix->pixelformat); return -EINVAL; } mf->width = pix->width; mf->height = pix->height; mf->field = pix->field; mf->colorspace = pix->colorspace; mf->code = xlate->code; ret = dma_align(&mf->width, &mf->height, xlate->host_fmt, pcdev->vb_mode, true); if (ret < 0) { dev_err(dev, "%s: failed to align %ux%u %s with DMA\n", __func__, pix->width, pix->height, xlate->host_fmt->name); return ret; } ret = set_format(pcdev, dev, icd, sd, &format, xlate); if (ret < 0) { dev_err(dev, "%s: failed to set format\n", __func__); return ret; } pix->width = mf->width; pix->height = mf->height; pix->field = mf->field; pix->colorspace = mf->colorspace; icd->current_fmt = xlate; return 0; }
/*
 * Have the client (sensor) scale its output so that, combined with CEU
 * scaling, the user gets the requested geometry.  Updates the cached
 * camera dimensions in *cam and computes the CEU rectangle in the
 * camera-scaled coordinate system.
 */
static int client_scale(struct soc_camera_device *icd, struct v4l2_rect *rect,
			struct v4l2_rect *sub_rect, struct v4l2_rect *ceu_rect,
			struct v4l2_format *f, bool ceu_can_scale)
{
	struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
	struct sh_mobile_ceu_cam *cam = icd->host_priv;
	struct device *dev = icd->dev.parent;
	/* Work on a scratch copy so *f is only updated on success */
	struct v4l2_format f_tmp = *f;
	struct v4l2_pix_format *pix_tmp = &f_tmp.fmt.pix;
	unsigned int scale_h, scale_v;
	int ret;

	ret = client_s_fmt(icd, &f_tmp, ceu_can_scale);
	if (ret < 0)
		return ret;

	dev_geo(dev, "5: camera scaled to %ux%u\n",
		pix_tmp->width, pix_tmp->height);

	/* Find out how the camera scaled the full rectangle */
	ret = get_camera_scales(sd, rect, &scale_h, &scale_v);
	if (ret < 0)
		return ret;

	dev_geo(dev, "7: camera scales %u:%u\n", scale_h, scale_v);

	cam->cam_width = pix_tmp->width;
	cam->cam_height = pix_tmp->height;
	f->fmt.pix.width = pix_tmp->width;
	f->fmt.pix.height = pix_tmp->height;

	/* Move the sub-window into camera-scaled coordinates */
	ceu_rect->left = scale_down(sub_rect->left, scale_h);
	ceu_rect->width = scale_down(sub_rect->width, scale_h);
	ceu_rect->top = scale_down(sub_rect->top, scale_v);
	ceu_rect->height = scale_down(sub_rect->height, scale_v);

	dev_geo(dev, "8: new CEU rect %ux%u@%u:%u\n",
		ceu_rect->width, ceu_rect->height,
		ceu_rect->left, ceu_rect->top);

	return 0;
}
static int mx1_camera_set_fmt(struct soc_camera_device *icd, struct v4l2_format *f) { struct v4l2_subdev *sd = soc_camera_to_subdev(icd); const struct soc_camera_format_xlate *xlate; struct v4l2_pix_format *pix = &f->fmt.pix; struct v4l2_mbus_framefmt mf; int ret, buswidth; xlate = soc_camera_xlate_by_fourcc(icd, pix->pixelformat); if (!xlate) { dev_warn(icd->parent, "Format %x not found\n", pix->pixelformat); return -EINVAL; } buswidth = xlate->host_fmt->bits_per_sample; if (buswidth > 8) { dev_warn(icd->parent, "bits-per-sample %d for format %x unsupported\n", buswidth, pix->pixelformat); return -EINVAL; } mf.width = pix->width; mf.height = pix->height; mf.field = pix->field; mf.colorspace = pix->colorspace; mf.code = xlate->code; ret = v4l2_subdev_call(sd, video, s_mbus_fmt, &mf); if (ret < 0) return ret; if (mf.code != xlate->code) return -EINVAL; pix->width = mf.width; pix->height = mf.height; pix->field = mf.field; pix->colorspace = mf.colorspace; icd->current_fmt = xlate; return ret; }
/* Resume hook: reset all cached sensor state to power-on defaults. */
static int gc0311_resume(struct soc_camera_device *icd)
{
	struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
	struct gc0311_priv *priv = container_of(sd, struct gc0311_priv, subdev);

	priv->flag_vflip = 0;
	priv->flag_hflip = 0;
	priv->brightness = 0;
	priv->contrast = 0;
	priv->sat = 0;
	priv->effect = 0;
	priv->wh_bal = 0;
	priv->inited = 0;

	return 0;
}
/*
 * As long as we don't implement host-side cropping and scaling, we can use
 * default g_crop and cropcap from soc_camera.c
 */
static int mx3_camera_set_crop(struct soc_camera_device *icd,
			       struct v4l2_crop *a)
{
	struct v4l2_rect *rect = &a->c;
	struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent);
	struct mx3_camera_dev *mx3_cam = ici->priv;
	struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
	struct v4l2_mbus_framefmt mf;
	int ret;

	/* Clamp the rectangle to what the host can address */
	soc_camera_limit_side(&rect->left, &rect->width, 0, 2, 4096);
	soc_camera_limit_side(&rect->top, &rect->height, 0, 2, 4096);

	ret = v4l2_subdev_call(sd, video, s_crop, a);
	if (ret < 0)
		return ret;

	/* The capture device might have changed its output */
	ret = v4l2_subdev_call(sd, video, g_mbus_fmt, &mf);
	if (ret < 0)
		return ret;

	if (mf.width & 7) {
		/* Ouch! We can only handle 8-byte aligned width... */
		stride_align(&mf.width);
		ret = v4l2_subdev_call(sd, video, s_mbus_fmt, &mf);
		if (ret < 0)
			return ret;
	}

	/* Reprogram the host only if the geometry actually changed */
	if (mf.width != icd->user_width || mf.height != icd->user_height)
		configure_geometry(mx3_cam, mf.width, mf.height, mf.code);

	dev_dbg(icd->dev.parent, "Sensor cropped %dx%d\n",
		mf.width, mf.height);

	icd->user_width = mf.width;
	icd->user_height = mf.height;

	return ret;
}
static int omap1_cam_try_fmt(struct soc_camera_device *icd, struct v4l2_format *f) { struct v4l2_subdev *sd = soc_camera_to_subdev(icd); const struct soc_camera_format_xlate *xlate; struct v4l2_pix_format *pix = &f->fmt.pix; struct v4l2_subdev_pad_config pad_cfg; struct v4l2_subdev_format format = { .which = V4L2_SUBDEV_FORMAT_TRY, }; struct v4l2_mbus_framefmt *mf = &format.format; int ret; /* TODO: limit to mx1 hardware capabilities */ xlate = soc_camera_xlate_by_fourcc(icd, pix->pixelformat); if (!xlate) { dev_warn(icd->parent, "Format %#x not found\n", pix->pixelformat); return -EINVAL; } mf->width = pix->width; mf->height = pix->height; mf->field = pix->field; mf->colorspace = pix->colorspace; mf->code = xlate->code; /* limit to sensor capabilities */ ret = v4l2_subdev_call(sd, pad, set_fmt, &pad_cfg, &format); if (ret < 0) return ret; pix->width = mf->width; pix->height = mf->height; pix->field = mf->field; pix->colorspace = mf->colorspace; return 0; }
static int mx2_camera_set_fmt(struct soc_camera_device *icd, struct v4l2_format *f) { struct v4l2_subdev *sd = soc_camera_to_subdev(icd); const struct soc_camera_format_xlate *xlate; struct v4l2_pix_format *pix = &f->fmt.pix; struct v4l2_mbus_framefmt mf; int ret; xlate = soc_camera_xlate_by_fourcc(icd, pix->pixelformat); if (!xlate) { dev_warn(icd->parent, "Format %x not found\n", pix->pixelformat); return -EINVAL; } mf.width = pix->width; mf.height = pix->height; mf.field = pix->field; mf.colorspace = pix->colorspace; mf.code = xlate->code; ret = v4l2_subdev_call(sd, video, s_mbus_fmt, &mf); if (ret < 0 && ret != -ENOIOCTLCMD) return ret; if (mf.code != xlate->code) return -EINVAL; pix->width = mf.width; pix->height = mf.height; pix->field = mf.field; pix->colorspace = mf.colorspace; icd->current_fmt = xlate; return 0; }
/*
 * Negotiate the parallel bus configuration between the sensor and the
 * MX1 CSI, resolve ambiguous polarities from platform data, program the
 * sensor, then set the matching polarity bits in CSICR1.
 */
static int mx1_camera_set_bus_param(struct soc_camera_device *icd)
{
	struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
	struct soc_camera_host *ici = to_soc_camera_host(icd->parent);
	struct mx1_camera_dev *pcdev = ici->priv;
	struct v4l2_mbus_config cfg = {.type = V4L2_MBUS_PARALLEL,};
	unsigned long common_flags;
	unsigned int csicr1;
	int ret;

	/* MX1 supports only 8bit buswidth */
	ret = v4l2_subdev_call(sd, video, g_mbus_config, &cfg);
	if (!ret) {
		common_flags = soc_mbus_config_compatible(&cfg, CSI_BUS_FLAGS);
		if (!common_flags) {
			dev_warn(icd->parent,
				 "Flags incompatible: camera 0x%x, host 0x%x\n",
				 cfg.flags, CSI_BUS_FLAGS);
			return -EINVAL;
		}
	} else if (ret != -ENOIOCTLCMD) {
		return ret;
	} else {
		/* Sensor doesn't implement g_mbus_config: assume full set */
		common_flags = CSI_BUS_FLAGS;
	}

	/* Make choices, based on platform choice */
	if ((common_flags & V4L2_MBUS_VSYNC_ACTIVE_HIGH) &&
	    (common_flags & V4L2_MBUS_VSYNC_ACTIVE_LOW)) {
		if (!pcdev->pdata ||
		    pcdev->pdata->flags & MX1_CAMERA_VSYNC_HIGH)
			common_flags &= ~V4L2_MBUS_VSYNC_ACTIVE_LOW;
		else
			common_flags &= ~V4L2_MBUS_VSYNC_ACTIVE_HIGH;
	}

	if ((common_flags & V4L2_MBUS_PCLK_SAMPLE_RISING) &&
	    (common_flags & V4L2_MBUS_PCLK_SAMPLE_FALLING)) {
		if (!pcdev->pdata ||
		    pcdev->pdata->flags & MX1_CAMERA_PCLK_RISING)
			common_flags &= ~V4L2_MBUS_PCLK_SAMPLE_FALLING;
		else
			common_flags &= ~V4L2_MBUS_PCLK_SAMPLE_RISING;
	}

	if ((common_flags & V4L2_MBUS_DATA_ACTIVE_HIGH) &&
	    (common_flags & V4L2_MBUS_DATA_ACTIVE_LOW)) {
		if (!pcdev->pdata ||
		    pcdev->pdata->flags & MX1_CAMERA_DATA_HIGH)
			common_flags &= ~V4L2_MBUS_DATA_ACTIVE_LOW;
		else
			common_flags &= ~V4L2_MBUS_DATA_ACTIVE_HIGH;
	}

	cfg.flags = common_flags;
	ret = v4l2_subdev_call(sd, video, s_mbus_config, &cfg);
	if (ret < 0 && ret != -ENOIOCTLCMD) {
		dev_dbg(icd->parent,
			"camera s_mbus_config(0x%lx) returned %d\n",
			common_flags, ret);
		return ret;
	}

	/* Mirror the negotiated polarities into the CSI control register */
	csicr1 = __raw_readl(pcdev->base + CSICR1);

	if (common_flags & V4L2_MBUS_PCLK_SAMPLE_RISING)
		csicr1 |= CSICR1_REDGE;
	if (common_flags & V4L2_MBUS_VSYNC_ACTIVE_HIGH)
		csicr1 |= CSICR1_SOF_POL;
	if (common_flags & V4L2_MBUS_DATA_ACTIVE_LOW)
		csicr1 |= CSICR1_DATA_POL;

	__raw_writel(csicr1, pcdev->base + CSICR1);

	return 0;
}
/*
 * Validate a requested capture format against both the sensor and the
 * UNICAM receiver without applying it.  Only progressive YUYV/UYVY and
 * JPEG (with an optional YUV thumbnail) sensor codes are accepted.
 */
static int unicam_camera_try_fmt(struct soc_camera_device *icd,
				 struct v4l2_format *f)
{
	struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
	const struct soc_camera_format_xlate *xlate;
	struct v4l2_pix_format *pix = &f->fmt.pix;
	struct v4l2_mbus_framefmt mf;
	struct v4l2_format thumb_fmt;
	struct v4l2_pix_format *thumb_pix;
	__u32 pixfmt = pix->pixelformat;
	int thumb = 0;
	int ret;

	pr_debug("-enter");
	xlate = soc_camera_xlate_by_fourcc(icd, pixfmt);
	if (!xlate) {
		dev_warn(icd->dev.parent, "Format %x not found\n", pixfmt);
		return -EINVAL;
	}

	/*
	 * NOTE(review): sizeimage is computed from the caller-supplied
	 * bytesperline before the sensor has adjusted width/height —
	 * verify callers always pass a valid bytesperline.
	 */
	pix->sizeimage = pix->height * pix->bytesperline;

	/* limit to sensor capabilities */
	mf.width = pix->width;
	mf.height = pix->height;
	mf.field = pix->field;
	mf.colorspace = pix->colorspace;
	mf.code = xlate->code;

	ret = v4l2_subdev_call(sd, video, try_mbus_fmt, &mf);
	if (ret < 0)
		return ret;

	pix->width = mf.width;
	pix->height = mf.height;
	pix->colorspace = mf.colorspace;

	/* Only progressive capture is supported */
	switch (mf.field) {
	case V4L2_FIELD_ANY:
	case V4L2_FIELD_NONE:
		pix->field = V4L2_FIELD_NONE;
		break;
	default:
		dev_err(icd->dev.parent, "Field type %d unsupported.\n",
			mf.field);
		return -EINVAL;
	}

	/* what format can unicam support */
	switch (mf.code) {
	case V4L2_MBUS_FMT_JPEG_1X8:
		/* check here if thumbnail is supported
		 * and check thumbnail format */
		ret = v4l2_subdev_call(sd, core, ioctl,
				       VIDIOC_THUMB_SUPPORTED,
				       (void *)&thumb);
		if ((!ret) && thumb) {
			ret = v4l2_subdev_call(sd, core, ioctl,
					       VIDIOC_THUMB_G_FMT,
					       (void *)&thumb_fmt);
			if (ret < 0) {
				dev_err(icd->dev.parent,
					"sensor driver should report thumbnail format\n");
				return -EINVAL;
			}
			thumb_pix = &thumb_fmt.fmt.pix;
			/* Thumbnail must be one of the YUV packings */
			switch (thumb_pix->pixelformat) {
			case V4L2_PIX_FMT_YUYV:
			case V4L2_PIX_FMT_UYVY:
				pr_debug("sensor supports thumbnail %c%c%c%c format",
					 pixfmtstr(thumb_pix->pixelformat));
				break;
			default:
				dev_err(icd->dev.parent,
					"sensor thumbnail format %c%c%c%c not supported\n",
					pixfmtstr(thumb_pix->pixelformat));
				return -EINVAL;
			}
		} else
			pr_debug("sensor doesnot support thumbnail (thumb=%d, ret=%d)\n",
				 thumb, ret);
		/* fall through - JPEG is accepted like the YUV codes below */
	case V4L2_MBUS_FMT_YUYV8_2X8:
	case V4L2_MBUS_FMT_UYVY8_2X8:
		/* Above formats are supported */
		break;
	default:
		dev_err(icd->dev.parent,
			"Sensor format code %d unsupported.\n", mf.code);
		return -EINVAL;
	}

	pr_debug("trying format=%c%c%c%c res=%dx%d success=%d",
		 pixfmtstr(pixfmt), mf.width, mf.height, ret);
	pr_debug("-exit");
	return ret;
}
static int unicam_videobuf_start_streaming_int(struct unicam_camera_dev \ *unicam_dev, unsigned int count) { struct soc_camera_device *icd = unicam_dev->icd; struct v4l2_subdev_sensor_interface_parms if_params; struct v4l2_subdev *sd = soc_camera_to_subdev(icd); struct rx_stat_list rx; int ret; int lane_err; int thumb; u32 raw_rx; enum afe_num afe; enum host_mode hmode; enum csi1ccp2_clock_mode ccp2_clock; enum csi2_lanes lanes = CSI2_SINGLE_LANE; int vc = 0; int id = 0; struct int_desc idesc; struct lane_timing timing; pr_debug("-enter"); pr_debug("enabling csi"); unicam_dev->panic_count = 0; atomic_set(&unicam_dev->cam_triggered, 0); /* get the sensor interface information */ ret = v4l2_subdev_call(sd, sensor, g_interface_parms, &if_params); if (ret < 0) { dev_err(unicam_dev->dev, "error on g_inferface_params(%d)\n", ret); return ret; } unicam_dev->if_params = if_params; /* set camera interface parameters */ /* we only support serial and csi2 sensor */ if ((unicam_dev->if_params.if_type == V4L2_SUBDEV_SENSOR_SERIAL) && (unicam_dev->if_params.if_mode == V4L2_SUBDEV_SENSOR_MODE_SERIAL_CSI2)) { hmode = CSI2; unicam_dev->b_mode = BUFFER_TRIGGER; } else if ((unicam_dev->if_params.if_type == V4L2_SUBDEV_SENSOR_SERIAL) && (unicam_dev->if_params.if_mode == V4L2_SUBDEV_SENSOR_MODE_SERIAL_CSI1)) { hmode = CSI1CCP2; unicam_dev->b_mode = BUFFER_SINGLE; } else { dev_err(unicam_dev->dev, "CSI2 iface only supported, requested iface %d mode=%d\n", unicam_dev->if_params.if_type, unicam_dev->if_params.if_mode); return -EINVAL; } if (unicam_dev->if_params.parms.serial.channel == 0) afe = AFE0; else if (unicam_dev->if_params.parms.serial.channel == 1) afe = AFE1; else { dev_err(unicam_dev->dev, "receiver only supports two channels, request channel=%d\n", unicam_dev->if_params.parms.serial.channel); return -EINVAL; } if ((unicam_dev->if_params.if_type == V4L2_SUBDEV_SENSOR_SERIAL) && (unicam_dev->if_params.if_mode == V4L2_SUBDEV_SENSOR_MODE_SERIAL_CSI2)) { hmode = CSI2; if 
(unicam_dev->if_params.parms.serial.lanes == 1) lanes = CSI2_SINGLE_LANE; else if (unicam_dev->if_params.parms.serial.lanes == 2) lanes = CSI2_DUAL_LANE; else { dev_err(unicam_dev->dev, "receiver only supports max 2 lanes, requested lanes(%d)\n", unicam_dev->if_params.parms.serial.lanes); return -EINVAL; } } else { hmode = CSI1CCP2; ccp2_clock = DATA_CLOCK; } unicam_dev->handle = get_mm_csi0_handle (hmode, afe, lanes ); if (unicam_dev->handle == NULL){ pr_err("Unable to get unicam handle\n"); return -EBUSY; } ret = mm_csi0_init(); if(ret){ pr_err("Unable to get unicam handle\n"); mm_csi0_teardown (); return -EINVAL; } mm_csi0_set_afe(); /* Digital PHY Setup */ /* Compulsary to get these values from sensor for CSI2*/ /* Don't care for CCP2/CSI1 can send a struct with junk values Will not be read */ timing.hs_settle_time = unicam_dev->if_params.parms.serial.hs_settle_time; timing.hs_term_time = unicam_dev->if_params.parms.serial.hs_term_time; pr_debug("HS: settle_t = %d, term_t = %d\n", timing.hs_settle_time, timing.hs_term_time); ret = mm_csi0_set_dig_phy(&timing); if(ret){ pr_err("Wrong digital timing\n"); mm_csi0_teardown (); return -EINVAL; } /* Set Mode */ mm_csi0_set_mode(ccp2_clock); /* set image identifier (CSI mode only) */ /* if thumbnail is supported we expect * thumbnail to be in image ptr format of thumbnails is yuv422 * format is checked in try format. * in case where thumbnail is not supported we get jpeg * image in data pointer. 
so we set the id as 0 */ thumb = 0; ret = v4l2_subdev_call(sd, core, ioctl, VIDIOC_THUMB_SUPPORTED, (void *)&thumb); if (ret < 0) dev_warn(unicam_dev->dev, "sensor returns error(%d) for VIDIOC_THUMB_SUPPORTED\n", ret); if ((icd->current_fmt->code == V4L2_MBUS_FMT_JPEG_1X8) && (thumb == 0)){ id = 0; } /* thumbnail not supported */ else { id = 0x1E; } if(icd->current_fmt->code == V4L2_MBUS_FMT_JPEG_1X8) pr_info("JPEG mode of capture !!!!\n"); if (unicam_dev->if_params.if_mode == V4L2_SUBDEV_SENSOR_MODE_SERIAL_CSI1){ id = 0; } ret = mm_csi0_cfg_image_id(vc, id); if(ret){ pr_err("Wrong Image IDs set for a given mode\n"); mm_csi0_teardown (); return -EINVAL; } ret = 0; /* pipelince decode */ /* Set vertical windowing */ if(unicam_dev->cap_mode){ ret |= mm_csi0_set_windowing_vertical(unicam_dev->crop.c.top, (unicam_dev->crop.c.top + unicam_dev->crop.c.height)); } else { ret |= mm_csi0_set_windowing_vertical(0, 0); } /* UNPACK */ ret |= mm_csi0_cfg_pipeline_unpack(PIX_UNPACK_NONE); /* DPCM decode */ ret |= mm_csi0_cfg_pipeline_dpcm_dec(DPCM_DEC_NONE); /* Set horizontal windowing */ if(unicam_dev->cap_mode){ ret |= mm_csi0_set_windowing_horizontal( unicam_dev->crop.c.left, (unicam_dev->crop.c.left + unicam_dev->crop.c.width)); } else { ret |= mm_csi0_set_windowing_horizontal(0, 0); } /* DPCM encode */ ret |= mm_csi0_cfg_pipeline_dpcm_enc(DPCM_ENC_NONE); /* PACK */ ret |= mm_csi0_cfg_pipeline_pack(PIX_PACK_NONE); /* FSP encode */ ret |= mm_csi0_enable_fsp_ccp2(); if(ret){ pr_err("Something wrong with pipeline config .. 
pl go check\n"); mm_csi0_teardown (); return -EINVAL; } /* Output engine */ mm_csi0_buffering_mode(unicam_dev->b_mode); mm_csi0_rx_burst(); mm_csi0_enable_unicam(); /* start sensor streaming */ ret = v4l2_subdev_call(sd, video, s_stream, 1); if (ret < 0 && ret != -ENOIOCTLCMD) { dev_err(unicam_dev->dev, "error on s_stream(%d)\n", ret); return ret; } udelay(30); if(unicam_dev->active){ /* unicam_camera_update_buf(unicam_dev); */ mm_csi0_start_rx(); /* set data capture */ if (unicam_dev->if_params.if_mode == V4L2_SUBDEV_SENSOR_MODE_SERIAL_CSI2) { idesc.fsi = 1; idesc.fei = 1; idesc.lci = 0; idesc.die = 1; idesc.dataline = 2; mm_csi0_config_int(&idesc, IMAGE_BUFFER); mm_csi0_config_int(&idesc, DATA_BUFFER); unicam_camera_capture(unicam_dev); } else { idesc.fsi = 0; idesc.fei = 0; idesc.lci = unicam_dev->icd->user_height; idesc.die = 0; idesc.dataline = 0; mm_csi0_config_int(&idesc, IMAGE_BUFFER); } atomic_set(&unicam_dev->streaming, 1); /* Error check code */ /* Check RX state for errors */ memset(&rx, 0x00, sizeof(struct rx_stat_list)); raw_rx = mm_csi0_get_rx_stat(&rx, 1); pr_info("raw_rx is 0x%x", raw_rx); if (unicam_dev->if_params.if_mode == V4L2_SUBDEV_SENSOR_MODE_SERIAL_CSI1){ if(raw_rx & RX_CCP2_ERROR_MASK){ pr_info("Error seen pl check for CCP2 errors 0x%x\n", raw_rx); if(rx.ssc) pr_info("Shifted sync code in CCP2\n"); if (rx.ofo || rx.ifo || rx.bfo || rx.dl) pr_info("FIFO errors or data lost\n"); if(rx.crce) pr_info("CRC error\n"); } } else { if(raw_rx & RX_CSI2_ERROR_MASK){ pr_info("Error seen pl check for CSI2 errors 0x%x\n", raw_rx); if(rx.sbe || rx.pbe || rx.hoe || rx.ple) pr_info("Specific errors in CSI2\n"); if (rx.ofo || rx.ifo || rx.bfo || rx.dl) pr_info("FIFO errors or data lost\n"); if(rx.crce) pr_info("CRC error\n"); } } /* Check lane transitions */ if (unicam_dev->if_params.if_mode == V4L2_SUBDEV_SENSOR_MODE_SERIAL_CSI2){ lane_err = mm_csi0_get_trans(); if(lane_err){ pr_err("Lane errors seen 0x%x\n", lane_err); /* return -EFAULT;*/ } } } 
unicam_reg_dump(); /* if (unicam_dev->active) if (unicam_dev->if_params.if_mode == \ V4L2_SUBDEV_SENSOR_MODE_SERIAL_CSI2) mod_timer(&unicam_dev->unicam_timer, \ jiffies + msecs_to_jiffies(UC_TIMEOUT_MS)); */ pr_debug("-exit"); return 0; }
/* should be called with unicam_dev->lock held */ static int unicam_camera_update_buf(struct unicam_camera_dev *unicam_dev) { struct v4l2_subdev *sd = soc_camera_to_subdev(unicam_dev->icd); struct buffer_desc im0, dat0; dma_addr_t phys_addr; unsigned int line_stride; struct v4l2_format thumb_fmt; struct v4l2_pix_format *pix; int thumb = 0, ret; pr_debug("-enter"); if (!unicam_dev->active) { pr_err("%s: Invalid buffer to Update", __func__); return -ENOMEM; } phys_addr = vb2_plane_dma_addr(unicam_dev->active, 0); pr_debug("updating buffer phys=0x%p", (void *)phys_addr); if (!phys_addr) { unicam_dev->active = NULL; pr_err("No valid address. skip capture\n"); return -ENOMEM; } /* For crop use-cases only linestride matters that too only for non-JPEG cases */ /* stride is in bytes */ if (unicam_dev->icd->current_fmt->code != V4L2_MBUS_FMT_JPEG_1X8) { if((unicam_dev->crop.c.top == 0) || (unicam_dev->crop.c.left == 0)){ /* Any one zero means no centering Reject all such crop attempts */ line_stride = soc_mbus_bytes_per_line(unicam_dev->icd->user_width, unicam_dev->icd-> current_fmt->host_fmt); } else { line_stride = soc_mbus_bytes_per_line(unicam_dev->crop.c.width, unicam_dev->icd-> current_fmt->host_fmt); } /* Non JPEG section of the code */ /* image 0 */ im0.start = (UInt32) phys_addr; im0.ls = (UInt32) line_stride; im0.size = line_stride * unicam_dev->icd->user_height; im0.wrap_en = 1; /* Coverity Fix: Dead Code */ /* if(unicam_dev->b_mode == BUFFER_DOUBLE && phys_addr1){ */ /* image 1 */ /* im1.start = phys_addr1; im1.ls = im0.ls; im1.size = im0.size; mm_csi0_update_addr(&im0, &im1, NULL, NULL); } else { */ mm_csi0_update_addr(&im0, NULL, NULL, NULL); pr_debug("Adr 0x%x ls 0x%x size 0x%x\n", im0.start, im0.ls, im0.size); /* } */ } else { /* JPEG section always in DAT0 */ /* check whether sensor supports thumbnail */ ret = v4l2_subdev_call(sd, core, ioctl, VIDIOC_THUMB_SUPPORTED, (void *)&thumb); if ((!ret) && thumb) { ret = v4l2_subdev_call(sd, core, ioctl, 
VIDIOC_THUMB_G_FMT, (void *)&thumb_fmt); if (ret < 0) { dev_err(unicam_dev->dev, "sensor driver should report thumbnail format\n"); return -1; } /* image 0 */ pix = &thumb_fmt.fmt.pix; line_stride = unicam_dev->icd->user_width; im0.start = (UInt32) phys_addr; im0.ls = (UInt32) pix->bytesperline; im0.size = pix->sizeimage; im0.wrap_en = 1; /* DAT0 to an address after THUMB */ dat0.start = (UInt32) ((char *)phys_addr + pix->sizeimage); dat0.ls = line_stride; dat0.size = line_stride * unicam_dev->icd->user_height * 3/2; mm_csi0_update_addr(&im0, NULL, &dat0, NULL); } else { /* no thumbnail supported */ /* don't set image0 since we are expecting data0 * to contain jpeg data */ dat0.start = (UInt32) phys_addr; dat0.ls = unicam_dev->icd->user_width; dat0.size = unicam_dev->icd->user_width * unicam_dev->icd->user_height * 3/2; } } pr_debug("-exit"); return 0; }