static const struct fimc_fmt *fimc_lite_try_format(struct fimc_lite *fimc, u32 *width, u32 *height, u32 *code, u32 *fourcc, int pad) { struct flite_variant *variant = fimc->variant; const struct fimc_fmt *fmt; fmt = fimc_lite_find_format(fourcc, code, 0); if (WARN_ON(!fmt)) return NULL; if (code) *code = fmt->mbus_code; if (fourcc) *fourcc = fmt->fourcc; if (pad == FLITE_SD_PAD_SINK) { v4l_bound_align_image(width, 8, variant->max_width, ffs(variant->out_width_align) - 1, height, 0, variant->max_height, 0, 0); } else { v4l_bound_align_image(width, 8, fimc->inp_frame.rect.width, ffs(variant->out_width_align) - 1, height, 0, fimc->inp_frame.rect.height, 0, 0); } v4l2_dbg(1, debug, &fimc->subdev, "code: 0x%x, %dx%d\n", code ? *code : 0, *width, *height); return fmt; }
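All of the snippets collected here funnel their width/height clamping through the same helper. For reference, its prototype in include/media/v4l2-common.h and the documented meaning of the arguments are roughly as follows (a paraphrased sketch, not copied from a specific kernel version, so check the tree you build against):

/*
 * Bound *width to [wmin, wmax] and *height to [hmin, hmax]. In addition,
 * *width is made a multiple of 2^walign, *height a multiple of 2^halign,
 * and the total size (*width * *height) a multiple of 2^salign. The
 * dimensions may be shrunk or enlarged to satisfy these constraints.
 */
void v4l_bound_align_image(unsigned int *width, unsigned int wmin,
			   unsigned int wmax, unsigned int walign,
			   unsigned int *height, unsigned int hmin,
			   unsigned int hmax, unsigned int halign,
			   unsigned int salign);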
void rot_bound_align_image(struct rot_ctx *ctx, struct rot_fmt *rot_fmt, u32 *width, u32 *height) { struct exynos_rot_variant *variant = ctx->rot_dev->variant; struct exynos_rot_size_limit *limit = NULL; switch (rot_fmt->pixelformat) { case V4L2_PIX_FMT_YUV420M: limit = &variant->limit_yuv420_3p; break; case V4L2_PIX_FMT_NV12M: limit = &variant->limit_yuv420_2p; break; case V4L2_PIX_FMT_YUYV: limit = &variant->limit_yuv422; break; case V4L2_PIX_FMT_RGB565: limit = &variant->limit_rgb565; break; case V4L2_PIX_FMT_RGB32: limit = &variant->limit_rgb888; break; default: break; } /* Bound an image to have width and height in limit */ v4l_bound_align_image(width, limit->min_x, limit->max_x, limit->align, height, limit->min_y, limit->max_y, limit->align, 0); }
/* 0.3-4: the format is always tried (tested) before it is set */
static int myvivi_vidioc_try_fmt_vid_cap(struct file *file, void *priv,
					 struct v4l2_format *f)
{
	unsigned int maxw, maxh;
	enum v4l2_field field;

	/* When enumerating the supported formats we only report
	 * V4L2_PIX_FMT_YUYV, so only that pixel format is accepted here. */
	if (f->fmt.pix.pixelformat != V4L2_PIX_FMT_YUYV)
		return -EINVAL;

	field = f->fmt.pix.field;
	if (field == V4L2_FIELD_ANY) {
		field = V4L2_FIELD_INTERLACED;
	} else if (V4L2_FIELD_INTERLACED != field) {
		return -EINVAL;
	}

	maxw = 1024;
	maxh = 768;

	/* Bound and align the format width and height, then compute the
	 * bytes per line and the total image size. */
	v4l_bound_align_image(&f->fmt.pix.width, 48, maxw, 2,
			      &f->fmt.pix.height, 32, maxh, 0, 0);
	/* bytes occupied by one line; the YUYV colour depth is simply 16 */
	f->fmt.pix.bytesperline = (f->fmt.pix.width * 16) >> 3;
	/* size of the whole image */
	f->fmt.pix.sizeimage = f->fmt.pix.height * f->fmt.pix.bytesperline;
	return 0;
}
/* Test whether the driver supports a given format */
static int myvivi_vidioc_try_fmt_vid_cap(struct file *file, void *priv,
					 struct v4l2_format *f)
{
	unsigned int maxw, maxh;
	enum v4l2_field field;

	if (f->fmt.pix.pixelformat != V4L2_PIX_FMT_YUYV)
		return -EINVAL;

	field = f->fmt.pix.field;
	if (field == V4L2_FIELD_ANY) {
		field = V4L2_FIELD_INTERLACED;
	} else if (V4L2_FIELD_INTERLACED != field) {
		return -EINVAL;
	}

	maxw = 1024;
	maxh = 768;

	v4l_bound_align_image(&f->fmt.pix.width, 48, maxw, 2,
			      &f->fmt.pix.height, 32, maxh, 0, 0);
	f->fmt.pix.bytesperline = (f->fmt.pix.width * 16) >> 3;
	f->fmt.pix.sizeimage = f->fmt.pix.height * f->fmt.pix.bytesperline;
	return 0;
}
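For context, try-format handlers such as the two myvivi examples above are reached through the VIDIOC_TRY_FMT ioctl. A minimal userspace call that exercises such a handler could look like the sketch below (the device path, the requested 800x600 size and the try_yuyv helper name are illustrative assumptions, not part of any driver shown here):

#include <fcntl.h>
#include <string.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

/* Ask the driver whether an 800x600 YUYV capture format is acceptable;
 * the driver adjusts the struct in place (bounds, alignment, stride). */
int try_yuyv(const char *dev)
{
	struct v4l2_format f;
	int ret, fd = open(dev, O_RDWR);

	if (fd < 0)
		return -1;
	memset(&f, 0, sizeof(f));
	f.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	f.fmt.pix.width = 800;
	f.fmt.pix.height = 600;
	f.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
	f.fmt.pix.field = V4L2_FIELD_ANY;
	ret = ioctl(fd, VIDIOC_TRY_FMT, &f);
	close(fd);
	return ret; /* on success f.fmt.pix holds the adjusted values */
}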
static int fimc_lite_try_fmt(struct fimc_lite *fimc, struct v4l2_pix_format_mplane *pixm, const struct fimc_fmt **ffmt) { struct flite_variant *variant = fimc->variant; u32 bpl = pixm->plane_fmt[0].bytesperline; const struct fimc_fmt *fmt; fmt = fimc_lite_find_format(&pixm->pixelformat, NULL, 0); if (WARN_ON(fmt == NULL)) return -EINVAL; if (ffmt) *ffmt = fmt; v4l_bound_align_image(&pixm->width, 8, variant->max_width, ffs(variant->out_width_align) - 1, &pixm->height, 0, variant->max_height, 0, 0); if ((bpl == 0 || ((bpl * 8) / fmt->depth[0]) < pixm->width)) pixm->plane_fmt[0].bytesperline = (pixm->width * fmt->depth[0]) / 8; if (pixm->plane_fmt[0].sizeimage == 0) pixm->plane_fmt[0].sizeimage = (pixm->width * pixm->height * fmt->depth[0]) / 8; pixm->num_planes = fmt->memplanes; pixm->pixelformat = fmt->fourcc; pixm->colorspace = V4L2_COLORSPACE_JPEG; pixm->field = V4L2_FIELD_NONE; return 0; }
static void __isp_subdev_try_format(struct fimc_isp *isp, struct v4l2_subdev_fh *fh, struct v4l2_subdev_format *fmt) { struct v4l2_mbus_framefmt *mf = &fmt->format; struct v4l2_mbus_framefmt *format; mf->colorspace = V4L2_COLORSPACE_SRGB; if (fmt->pad == FIMC_ISP_SD_PAD_SINK) { v4l_bound_align_image(&mf->width, FIMC_ISP_SINK_WIDTH_MIN, FIMC_ISP_SINK_WIDTH_MAX, 0, &mf->height, FIMC_ISP_SINK_HEIGHT_MIN, FIMC_ISP_SINK_HEIGHT_MAX, 0, 0); mf->code = V4L2_MBUS_FMT_SGRBG10_1X10; } else { if (fmt->which == V4L2_SUBDEV_FORMAT_TRY) format = v4l2_subdev_get_try_format(fh, FIMC_ISP_SD_PAD_SINK); else format = &isp->sink_fmt; /* Allow changing format only on sink pad */ mf->width = format->width - FIMC_ISP_CAC_MARGIN_WIDTH; mf->height = format->height - FIMC_ISP_CAC_MARGIN_HEIGHT; if (fmt->pad == FIMC_ISP_SD_PAD_SRC_FIFO) { mf->code = V4L2_MBUS_FMT_YUV10_1X30; mf->colorspace = V4L2_COLORSPACE_JPEG; } else { mf->code = format->code; } } }
void mtk_vcodec_enc_set_default_params(struct mtk_vcodec_ctx *ctx) { struct mtk_q_data *q_data; ctx->m2m_ctx->q_lock = &ctx->dev->dev_mutex; ctx->fh.m2m_ctx = ctx->m2m_ctx; ctx->fh.ctrl_handler = &ctx->ctrl_hdl; INIT_WORK(&ctx->encode_work, mtk_venc_worker); ctx->colorspace = V4L2_COLORSPACE_REC709; ctx->ycbcr_enc = V4L2_YCBCR_ENC_DEFAULT; ctx->quantization = V4L2_QUANTIZATION_DEFAULT; ctx->xfer_func = V4L2_XFER_FUNC_DEFAULT; q_data = &ctx->q_data[MTK_Q_DATA_SRC]; memset(q_data, 0, sizeof(struct mtk_q_data)); q_data->visible_width = DFT_CFG_WIDTH; q_data->visible_height = DFT_CFG_HEIGHT; q_data->coded_width = DFT_CFG_WIDTH; q_data->coded_height = DFT_CFG_HEIGHT; q_data->field = V4L2_FIELD_NONE; q_data->fmt = &mtk_video_formats[OUT_FMT_IDX]; v4l_bound_align_image(&q_data->coded_width, MTK_VENC_MIN_W, MTK_VENC_MAX_W, 4, &q_data->coded_height, MTK_VENC_MIN_H, MTK_VENC_MAX_H, 5, 6); if (q_data->coded_width < DFT_CFG_WIDTH && (q_data->coded_width + 16) <= MTK_VENC_MAX_W) q_data->coded_width += 16; if (q_data->coded_height < DFT_CFG_HEIGHT && (q_data->coded_height + 32) <= MTK_VENC_MAX_H) q_data->coded_height += 32; q_data->sizeimage[0] = q_data->coded_width * q_data->coded_height+ ((ALIGN(q_data->coded_width, 16) * 2) * 16); q_data->bytesperline[0] = q_data->coded_width; q_data->sizeimage[1] = (q_data->coded_width * q_data->coded_height) / 2 + (ALIGN(q_data->coded_width, 16) * 16); q_data->bytesperline[1] = q_data->coded_width; q_data = &ctx->q_data[MTK_Q_DATA_DST]; memset(q_data, 0, sizeof(struct mtk_q_data)); q_data->coded_width = DFT_CFG_WIDTH; q_data->coded_height = DFT_CFG_HEIGHT; q_data->fmt = &mtk_video_formats[CAP_FMT_IDX]; q_data->field = V4L2_FIELD_NONE; ctx->q_data[MTK_Q_DATA_DST].sizeimage[0] = DFT_CFG_WIDTH * DFT_CFG_HEIGHT; ctx->q_data[MTK_Q_DATA_DST].bytesperline[0] = 0; }
static int mt9v011_try_fmt(struct v4l2_subdev *sd, struct v4l2_format *fmt)
{
	struct v4l2_pix_format *pix = &fmt->fmt.pix;

	if (pix->pixelformat != V4L2_PIX_FMT_SGRBG8)
		return -EINVAL;

	v4l_bound_align_image(&pix->width, 48, 639, 1,
			      &pix->height, 32, 480, 1, 0);

	return 0;
}
static int mt9v011_try_mbus_fmt(struct v4l2_subdev *sd,
				struct v4l2_mbus_framefmt *fmt)
{
	if (fmt->code != V4L2_MBUS_FMT_SGRBG8_1X8)
		return -EINVAL;

	v4l_bound_align_image(&fmt->width, 48, 639, 1,
			      &fmt->height, 32, 480, 1, 0);

	fmt->field = V4L2_FIELD_NONE;
	fmt->colorspace = V4L2_COLORSPACE_SRGB;

	return 0;
}
static const struct fimc_fmt *fimc_lite_subdev_try_fmt(struct fimc_lite *fimc, struct v4l2_subdev_pad_config *cfg, struct v4l2_subdev_format *format) { struct flite_drvdata *dd = fimc->dd; struct v4l2_mbus_framefmt *mf = &format->format; const struct fimc_fmt *fmt = NULL; if (format->pad == FLITE_SD_PAD_SINK) { v4l_bound_align_image(&mf->width, 8, dd->max_width, ffs(dd->out_width_align) - 1, &mf->height, 0, dd->max_height, 0, 0); fmt = fimc_lite_find_format(NULL, &mf->code, 0, 0); if (WARN_ON(!fmt)) return NULL; mf->colorspace = fmt->colorspace; mf->code = fmt->mbus_code; } else { struct flite_frame *sink = &fimc->inp_frame; struct v4l2_mbus_framefmt *sink_fmt; struct v4l2_rect *rect; if (format->which == V4L2_SUBDEV_FORMAT_TRY) { sink_fmt = v4l2_subdev_get_try_format(&fimc->subdev, cfg, FLITE_SD_PAD_SINK); mf->code = sink_fmt->code; mf->colorspace = sink_fmt->colorspace; rect = v4l2_subdev_get_try_crop(&fimc->subdev, cfg, FLITE_SD_PAD_SINK); } else { mf->code = sink->fmt->mbus_code; mf->colorspace = sink->fmt->colorspace; rect = &sink->rect; } /* Allow changing format only on sink pad */ mf->width = rect->width; mf->height = rect->height; } mf->field = V4L2_FIELD_NONE; v4l2_dbg(1, debug, &fimc->subdev, "code: %#x (%d), %dx%d\n", mf->code, mf->colorspace, mf->width, mf->height); return fmt; }
static int fimc_m2m_try_crop(struct fimc_ctx *ctx, struct v4l2_crop *cr) { struct fimc_dev *fimc = ctx->fimc_dev; struct fimc_frame *f; u32 min_size, halign, depth = 0; int i; if (cr->c.top < 0 || cr->c.left < 0) { v4l2_err(&fimc->m2m.vfd, "doesn't support negative values for top & left\n"); return -EINVAL; } if (cr->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) f = &ctx->d_frame; else if (cr->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) f = &ctx->s_frame; else return -EINVAL; min_size = (f == &ctx->s_frame) ? fimc->variant->min_inp_pixsize : fimc->variant->min_out_pixsize; /* Get pixel alignment constraints. */ if (fimc->variant->min_vsize_align == 1) halign = fimc_fmt_is_rgb(f->fmt->color) ? 0 : 1; else halign = ffs(fimc->variant->min_vsize_align) - 1; for (i = 0; i < f->fmt->colplanes; i++) depth += f->fmt->depth[i]; v4l_bound_align_image(&cr->c.width, min_size, f->o_width, ffs(min_size) - 1, &cr->c.height, min_size, f->o_height, halign, 64/(ALIGN(depth, 8))); /* adjust left/top if cropping rectangle is out of bounds */ if (cr->c.left + cr->c.width > f->o_width) cr->c.left = f->o_width - cr->c.width; if (cr->c.top + cr->c.height > f->o_height) cr->c.top = f->o_height - cr->c.height; cr->c.left = round_down(cr->c.left, min_size); cr->c.top = round_down(cr->c.top, fimc->variant->hor_offs_align); dbg("l:%d, t:%d, w:%d, h:%d, f_w: %d, f_h: %d", cr->c.left, cr->c.top, cr->c.width, cr->c.height, f->f_width, f->f_height); return 0; }
static int ak881x_try_g_mbus_fmt(struct v4l2_subdev *sd, struct v4l2_mbus_framefmt *mf) { struct i2c_client *client = v4l2_get_subdevdata(sd); struct ak881x *ak881x = to_ak881x(client); v4l_bound_align_image(&mf->width, 0, 720, 2, &mf->height, 0, ak881x->lines, 1, 0); mf->field = V4L2_FIELD_INTERLACED; mf->code = V4L2_MBUS_FMT_YUYV8_2X8; mf->colorspace = V4L2_COLORSPACE_SMPTE170M; return 0; }
static int hva_try_fmt_frame(struct file *file, void *priv, struct v4l2_format *f) { struct hva_ctx *ctx = fh_to_ctx(file->private_data); struct device *dev = ctx_to_dev(ctx); struct v4l2_pix_format *pix = &f->fmt.pix; u32 pixelformat = pix->pixelformat; const struct hva_enc *enc; u32 width, height; enc = hva_find_encoder(ctx, pixelformat, ctx->streaminfo.streamformat); if (!enc) { dev_dbg(dev, "%s V4L2 TRY_FMT (OUTPUT): unsupported format %.4s\n", ctx->name, (char *)&pixelformat); return -EINVAL; } /* adjust width & height */ width = pix->width; height = pix->height; v4l_bound_align_image(&pix->width, HVA_MIN_WIDTH, HVA_MAX_WIDTH, frame_alignment(pixelformat) - 1, &pix->height, HVA_MIN_HEIGHT, HVA_MAX_HEIGHT, frame_alignment(pixelformat) - 1, 0); if ((pix->width != width) || (pix->height != height)) dev_dbg(dev, "%s V4L2 TRY_FMT (OUTPUT): resolution updated %dx%d -> %dx%d to fit min/max/alignment\n", ctx->name, width, height, pix->width, pix->height); width = ALIGN(pix->width, HVA_WIDTH_ALIGNMENT); height = ALIGN(pix->height, HVA_HEIGHT_ALIGNMENT); if (!pix->colorspace) { pix->colorspace = V4L2_COLORSPACE_REC709; pix->xfer_func = V4L2_XFER_FUNC_DEFAULT; pix->ycbcr_enc = V4L2_YCBCR_ENC_DEFAULT; pix->quantization = V4L2_QUANTIZATION_DEFAULT; } pix->bytesperline = frame_stride(width, pixelformat); pix->sizeimage = frame_size(width, height, pixelformat); pix->field = V4L2_FIELD_NONE; return 0; }
static int bdisp_try_fmt(struct file *file, void *fh, struct v4l2_format *f) { struct bdisp_ctx *ctx = fh_to_ctx(fh); struct v4l2_pix_format *pix = &f->fmt.pix; const struct bdisp_fmt *format; u32 in_w, in_h; format = bdisp_find_fmt(pix->pixelformat); if (!format) { dev_dbg(ctx->bdisp_dev->dev, "Unknown format 0x%x\n", pix->pixelformat); return -EINVAL; } /* YUV420P only supported for VIDEO_OUTPUT */ if ((format->pixelformat == V4L2_PIX_FMT_YUV420) && (f->type == V4L2_BUF_TYPE_VIDEO_CAPTURE)) { dev_dbg(ctx->bdisp_dev->dev, "No YU12 on capture\n"); return -EINVAL; } /* Field (interlaced only supported on OUTPUT) */ if ((f->type == V4L2_BUF_TYPE_VIDEO_CAPTURE) || (pix->field != V4L2_FIELD_INTERLACED)) pix->field = V4L2_FIELD_NONE; /* Adjust width & height */ in_w = pix->width; in_h = pix->height; v4l_bound_align_image(&pix->width, BDISP_MIN_W, BDISP_MAX_W, ffs(format->w_align) - 1, &pix->height, BDISP_MIN_H, BDISP_MAX_H, ffs(format->h_align) - 1, 0); if ((pix->width != in_w) || (pix->height != in_h)) dev_dbg(ctx->bdisp_dev->dev, "%s size updated: %dx%d -> %dx%d\n", __func__, in_w, in_h, pix->width, pix->height); pix->bytesperline = (pix->width * format->bpp_plane0) / 8; pix->sizeimage = (pix->width * pix->height * format->bpp) / 8; if (f->type == V4L2_BUF_TYPE_VIDEO_CAPTURE) pix->colorspace = bdisp_dflt_fmt.colorspace; return 0; }
static void fimc_lite_try_crop(struct fimc_lite *fimc, struct v4l2_rect *r) { struct flite_frame *frame = &fimc->inp_frame; v4l_bound_align_image(&r->width, 0, frame->f_width, 0, &r->height, 0, frame->f_height, 0, 0); /* Adjust left/top if cropping rectangle got out of bounds */ r->left = clamp_t(u32, r->left, 0, frame->f_width - r->width); r->left = round_down(r->left, fimc->variant->win_hor_offs_align); r->top = clamp_t(u32, r->top, 0, frame->f_height - r->height); v4l2_dbg(1, debug, &fimc->subdev, "(%d,%d)/%dx%d, sink fmt: %dx%d", r->left, r->top, r->width, r->height, frame->f_width, frame->f_height); }
static int fimc_try_fmt_mplane(struct fimc_ctx *ctx, struct v4l2_format *f) { struct fimc_dev *fimc = ctx->fimc_dev; struct fimc_variant *variant = fimc->variant; struct v4l2_pix_format_mplane *pix = &f->fmt.pix_mp; struct fimc_fmt *fmt; u32 max_w, mod_x, mod_y; if (!IS_M2M(f->type)) return -EINVAL; dbg("w: %d, h: %d", pix->width, pix->height); fmt = fimc_find_format(&pix->pixelformat, NULL, get_m2m_fmt_flags(f->type), 0); if (WARN(fmt == NULL, "Pixel format lookup failed")) return -EINVAL; if (pix->field == V4L2_FIELD_ANY) pix->field = V4L2_FIELD_NONE; else if (pix->field != V4L2_FIELD_NONE) return -EINVAL; if (f->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) { max_w = variant->pix_limit->scaler_dis_w; mod_x = ffs(variant->min_inp_pixsize) - 1; } else { max_w = variant->pix_limit->out_rot_dis_w; mod_x = ffs(variant->min_out_pixsize) - 1; } if (tiled_fmt(fmt)) { mod_x = 6; /* 64 x 32 pixels tile */ mod_y = 5; } else { if (variant->min_vsize_align == 1) mod_y = fimc_fmt_is_rgb(fmt->color) ? 0 : 1; else mod_y = ffs(variant->min_vsize_align) - 1; } v4l_bound_align_image(&pix->width, 16, max_w, mod_x, &pix->height, 8, variant->pix_limit->scaler_dis_w, mod_y, 0); fimc_adjust_mplane_format(fmt, pix->width, pix->height, &f->fmt.pix_mp); return 0; }
static void rvin_format_align(struct rvin_dev *vin, struct v4l2_pix_format *pix) { u32 walign; if (!rvin_format_from_pixel(pix->pixelformat) || (vin->info->model == RCAR_M1 && pix->pixelformat == V4L2_PIX_FMT_XBGR32)) pix->pixelformat = RVIN_DEFAULT_FORMAT; switch (pix->field) { case V4L2_FIELD_TOP: case V4L2_FIELD_BOTTOM: case V4L2_FIELD_NONE: case V4L2_FIELD_INTERLACED_TB: case V4L2_FIELD_INTERLACED_BT: case V4L2_FIELD_INTERLACED: break; case V4L2_FIELD_ALTERNATE: /* * Driver does not (yet) support outputting ALTERNATE to a * userspace. It does support outputting INTERLACED so use * the VIN hardware to combine the two fields. */ pix->field = V4L2_FIELD_INTERLACED; pix->height *= 2; break; default: pix->field = RVIN_DEFAULT_FIELD; break; } /* HW limit width to a multiple of 32 (2^5) for NV16 else 2 (2^1) */ walign = vin->format.pixelformat == V4L2_PIX_FMT_NV16 ? 5 : 1; /* Limit to VIN capabilities */ v4l_bound_align_image(&pix->width, 2, vin->info->max_width, walign, &pix->height, 4, vin->info->max_height, 2, 0); pix->bytesperline = rvin_format_bytesperline(pix); pix->sizeimage = rvin_format_sizeimage(pix); vin_dbg(vin, "Format %ux%u bpl: %u size: %u\n", pix->width, pix->height, pix->bytesperline, pix->sizeimage); }
static void __isp_video_try_fmt(struct fimc_isp *isp,
				struct v4l2_pix_format_mplane *pixm,
				const struct fimc_fmt **fmt)
{
	*fmt = fimc_isp_find_format(&pixm->pixelformat, NULL, 2);

	pixm->colorspace = V4L2_COLORSPACE_SRGB;
	pixm->field = V4L2_FIELD_NONE;
	pixm->num_planes = (*fmt)->memplanes;
	pixm->pixelformat = (*fmt)->fourcc;
	/*
	 * TODO: double check with the documentation whether these
	 * width/height constraints are correct.
	 */
	v4l_bound_align_image(&pixm->width, FIMC_ISP_SOURCE_WIDTH_MIN,
			      FIMC_ISP_SOURCE_WIDTH_MAX, 3,
			      &pixm->height, FIMC_ISP_SOURCE_HEIGHT_MIN,
			      FIMC_ISP_SOURCE_HEIGHT_MAX, 0, 0);
}
static int fimc_lite_try_fmt(struct fimc_lite *fimc,
			     struct v4l2_pix_format_mplane *pixm,
			     const struct fimc_fmt **ffmt)
{
	u32 bpl = pixm->plane_fmt[0].bytesperline;
	struct flite_drvdata *dd = fimc->dd;
	const struct fimc_fmt *inp_fmt = fimc->inp_frame.fmt;
	const struct fimc_fmt *fmt;

	if (WARN_ON(inp_fmt == NULL))
		return -EINVAL;
	/*
	 * We allow some flexibility only for YUV formats. In case of raw
	 * Bayer the FIMC-LITE's output format must match its camera
	 * interface input format.
	 */
	if (inp_fmt->flags & FMT_FLAGS_YUV)
		fmt = fimc_lite_find_format(&pixm->pixelformat, NULL,
					    inp_fmt->flags, 0);
	else
		fmt = inp_fmt;

	if (WARN_ON(fmt == NULL))
		return -EINVAL;
	if (ffmt)
		*ffmt = fmt;

	v4l_bound_align_image(&pixm->width, 8, dd->max_width,
			      ffs(dd->out_width_align) - 1,
			      &pixm->height, 0, dd->max_height, 0, 0);

	if ((bpl == 0 || ((bpl * 8) / fmt->depth[0]) < pixm->width))
		pixm->plane_fmt[0].bytesperline = (pixm->width *
						   fmt->depth[0]) / 8;
	if (pixm->plane_fmt[0].sizeimage == 0)
		pixm->plane_fmt[0].sizeimage = (pixm->width * pixm->height *
						fmt->depth[0]) / 8;
	pixm->num_planes = fmt->memplanes;
	pixm->pixelformat = fmt->fourcc;
	pixm->colorspace = fmt->colorspace;
	pixm->field = V4L2_FIELD_NONE;
	return 0;
}
static int ak881x_fill_fmt(struct v4l2_subdev *sd, struct v4l2_subdev_pad_config *cfg, struct v4l2_subdev_format *format) { struct v4l2_mbus_framefmt *mf = &format->format; struct i2c_client *client = v4l2_get_subdevdata(sd); struct ak881x *ak881x = to_ak881x(client); if (format->pad) return -EINVAL; v4l_bound_align_image(&mf->width, 0, 720, 2, &mf->height, 0, ak881x->lines, 1, 0); mf->field = V4L2_FIELD_INTERLACED; mf->code = MEDIA_BUS_FMT_YUYV8_2X8; mf->colorspace = V4L2_COLORSPACE_SMPTE170M; return 0; }
static void vdic_try_fmt(struct vdic_priv *priv, struct v4l2_subdev_pad_config *cfg, struct v4l2_subdev_format *sdformat, const struct imx_media_pixfmt **cc) { struct v4l2_mbus_framefmt *infmt; *cc = imx_media_find_ipu_format(sdformat->format.code, CS_SEL_YUV); if (!*cc) { u32 code; imx_media_enum_ipu_format(&code, 0, CS_SEL_YUV); *cc = imx_media_find_ipu_format(code, CS_SEL_YUV); sdformat->format.code = (*cc)->codes[0]; } infmt = __vdic_get_fmt(priv, cfg, priv->active_input_pad, sdformat->which); switch (sdformat->pad) { case VDIC_SRC_PAD_DIRECT: sdformat->format = *infmt; /* output is always progressive! */ sdformat->format.field = V4L2_FIELD_NONE; break; case VDIC_SINK_PAD_DIRECT: case VDIC_SINK_PAD_IDMAC: v4l_bound_align_image(&sdformat->format.width, MIN_W, MAX_W_VDIC, W_ALIGN, &sdformat->format.height, MIN_H, MAX_H_VDIC, H_ALIGN, S_ALIGN); imx_media_fill_default_mbus_fields(&sdformat->format, infmt, true); /* input must be interlaced! Choose SEQ_TB if not */ if (!V4L2_FIELD_HAS_BOTH(sdformat->format.field)) sdformat->format.field = V4L2_FIELD_SEQ_TB; break; } }
static int dma_align(int *width, int *height, const struct soc_mbus_pixelfmt *fmt, enum omap1_cam_vb_mode vb_mode, bool enlarge) { s32 bytes_per_line = soc_mbus_bytes_per_line(*width, fmt); if (bytes_per_line < 0) return bytes_per_line; if (!is_dma_aligned(bytes_per_line, *height, vb_mode)) { unsigned int pxalign = __fls(bytes_per_line / *width); unsigned int salign = DMA_FRAME_SHIFT(vb_mode) + DMA_ELEMENT_SHIFT - pxalign; unsigned int incr = enlarge << salign; v4l_bound_align_image(width, 1, *width + incr, 0, height, 1, *height + incr, 0, salign); return 0; } return 1; }
int gsc_try_crop(struct gsc_ctx *ctx, struct v4l2_crop *cr) { struct gsc_frame *f; struct gsc_dev *gsc = ctx->gsc_dev; struct gsc_variant *variant = gsc->variant; u32 mod_x = 0, mod_y = 0, tmp_w, tmp_h; u32 min_w, min_h, max_w, max_h; if (cr->c.top < 0 || cr->c.left < 0) { pr_err("doesn't support negative values for top & left\n"); return -EINVAL; } pr_debug("user put w: %d, h: %d", cr->c.width, cr->c.height); if (cr->type == V4L2_BUF_TYPE_VIDEO_CAPTURE) f = &ctx->d_frame; else if (cr->type == V4L2_BUF_TYPE_VIDEO_OUTPUT) f = &ctx->s_frame; else return -EINVAL; max_w = f->f_width; max_h = f->f_height; tmp_w = cr->c.width; tmp_h = cr->c.height; if (V4L2_TYPE_IS_OUTPUT(cr->type)) { if ((is_yuv422(f->fmt->color) && f->fmt->num_comp == 1) || is_rgb(f->fmt->color)) min_w = 32; else min_w = 64; if ((is_yuv422(f->fmt->color) && f->fmt->num_comp == 3) || is_yuv420(f->fmt->color)) min_h = 32; else min_h = 16; } else { if (is_yuv420(f->fmt->color) || is_yuv422(f->fmt->color)) mod_x = ffs(variant->pix_align->target_w) - 1; if (is_yuv420(f->fmt->color)) mod_y = ffs(variant->pix_align->target_h) - 1; if (ctx->gsc_ctrls.rotate->val == 90 || ctx->gsc_ctrls.rotate->val == 270) { max_w = f->f_height; max_h = f->f_width; min_w = variant->pix_min->target_rot_en_w; min_h = variant->pix_min->target_rot_en_h; tmp_w = cr->c.height; tmp_h = cr->c.width; } else { min_w = variant->pix_min->target_rot_dis_w; min_h = variant->pix_min->target_rot_dis_h; } } pr_debug("mod_x: %d, mod_y: %d, min_w: %d, min_h = %d", mod_x, mod_y, min_w, min_h); pr_debug("tmp_w : %d, tmp_h : %d", tmp_w, tmp_h); v4l_bound_align_image(&tmp_w, min_w, max_w, mod_x, &tmp_h, min_h, max_h, mod_y, 0); if (!V4L2_TYPE_IS_OUTPUT(cr->type) && (ctx->gsc_ctrls.rotate->val == 90 || ctx->gsc_ctrls.rotate->val == 270)) gsc_check_crop_change(tmp_h, tmp_w, &cr->c.width, &cr->c.height); else gsc_check_crop_change(tmp_w, tmp_h, &cr->c.width, &cr->c.height); /* adjust left/top if cropping rectangle is out of bounds */ /* Need to add code to algin left value with 2's multiple */ if (cr->c.left + tmp_w > max_w) cr->c.left = max_w - tmp_w; if (cr->c.top + tmp_h > max_h) cr->c.top = max_h - tmp_h; if ((is_yuv420(f->fmt->color) || is_yuv422(f->fmt->color)) && cr->c.left & 1) cr->c.left -= 1; pr_debug("Aligned l:%d, t:%d, w:%d, h:%d, f_w: %d, f_h: %d", cr->c.left, cr->c.top, cr->c.width, cr->c.height, max_w, max_h); return 0; }
int gsc_try_fmt_mplane(struct gsc_ctx *ctx, struct v4l2_format *f) { struct gsc_dev *gsc = ctx->gsc_dev; struct gsc_variant *variant = gsc->variant; struct v4l2_pix_format_mplane *pix_mp = &f->fmt.pix_mp; const struct gsc_fmt *fmt; u32 max_w, max_h, mod_x, mod_y; u32 min_w, min_h, tmp_w, tmp_h; int i; pr_debug("user put w: %d, h: %d", pix_mp->width, pix_mp->height); fmt = find_fmt(&pix_mp->pixelformat, NULL, 0); if (!fmt) { pr_err("pixelformat format (0x%X) invalid\n", pix_mp->pixelformat); return -EINVAL; } if (pix_mp->field == V4L2_FIELD_ANY) pix_mp->field = V4L2_FIELD_NONE; else if (pix_mp->field != V4L2_FIELD_NONE) { pr_debug("Not supported field order(%d)\n", pix_mp->field); return -EINVAL; } max_w = variant->pix_max->target_rot_dis_w; max_h = variant->pix_max->target_rot_dis_h; mod_x = ffs(variant->pix_align->org_w) - 1; if (is_yuv420(fmt->color)) mod_y = ffs(variant->pix_align->org_h) - 1; else mod_y = ffs(variant->pix_align->org_h) - 2; if (V4L2_TYPE_IS_OUTPUT(f->type)) { min_w = variant->pix_min->org_w; min_h = variant->pix_min->org_h; } else { min_w = variant->pix_min->target_rot_dis_w; min_h = variant->pix_min->target_rot_dis_h; pix_mp->colorspace = ctx->out_colorspace; } pr_debug("mod_x: %d, mod_y: %d, max_w: %d, max_h = %d", mod_x, mod_y, max_w, max_h); /* To check if image size is modified to adjust parameter against hardware abilities */ tmp_w = pix_mp->width; tmp_h = pix_mp->height; v4l_bound_align_image(&pix_mp->width, min_w, max_w, mod_x, &pix_mp->height, min_h, max_h, mod_y, 0); if (tmp_w != pix_mp->width || tmp_h != pix_mp->height) pr_debug("Image size has been modified from %dx%d to %dx%d\n", tmp_w, tmp_h, pix_mp->width, pix_mp->height); pix_mp->num_planes = fmt->num_planes; if (V4L2_TYPE_IS_OUTPUT(f->type)) ctx->out_colorspace = pix_mp->colorspace; for (i = 0; i < pix_mp->num_planes; ++i) { struct v4l2_plane_pix_format *plane_fmt = &pix_mp->plane_fmt[i]; u32 bpl = plane_fmt->bytesperline; if (fmt->num_comp == 1 && /* Packed */ (bpl == 0 || (bpl * 8 / fmt->depth[i]) < pix_mp->width)) bpl = pix_mp->width * fmt->depth[i] / 8; if (fmt->num_comp > 1 && /* Planar */ (bpl == 0 || bpl < pix_mp->width)) bpl = pix_mp->width; if (i != 0 && fmt->num_comp == 3) bpl /= 2; plane_fmt->bytesperline = bpl; plane_fmt->sizeimage = max(pix_mp->width * pix_mp->height * fmt->depth[i] / 8, plane_fmt->sizeimage); pr_debug("[%d]: bpl: %d, sizeimage: %d", i, bpl, pix_mp->plane_fmt[i].sizeimage); } return 0; }
int gsc_try_fmt_mplane(struct gsc_ctx *ctx, struct v4l2_format *f) { struct gsc_dev *gsc = ctx->gsc_dev; struct gsc_variant *variant = gsc->variant; struct v4l2_pix_format_mplane *pix_mp = &f->fmt.pix_mp; struct gsc_fmt *fmt; u32 max_w, max_h, mod_x, mod_y; u32 min_w, min_h, tmp_w, tmp_h; int i; gsc_dbg("user put w: %d, h: %d", pix_mp->width, pix_mp->height); fmt = find_fmt(&pix_mp->pixelformat, NULL, 0); if (!fmt) { gsc_err("pixelformat format (0x%X) invalid\n", pix_mp->pixelformat); return -EINVAL; } if (pix_mp->field == V4L2_FIELD_ANY) pix_mp->field = V4L2_FIELD_NONE; else if (pix_mp->field != V4L2_FIELD_NONE) { gsc_err("Not supported field order(%d)\n", pix_mp->field); return -EINVAL; } max_w = variant->pix_max->target_rot_dis_w; max_h = variant->pix_max->target_rot_dis_h; if (V4L2_TYPE_IS_OUTPUT(f->type)) { mod_x = ffs(variant->pix_align->org_w) - 1; if (is_yuv420(fmt->color)) mod_y = ffs(variant->pix_align->org_h) - 1; else mod_y = ffs(variant->pix_align->org_h) - 2; min_w = variant->pix_min->org_w; min_h = variant->pix_min->org_h; } else { mod_x = ffs(variant->pix_align->org_w) - 1; if (is_yuv420(fmt->color)) mod_y = ffs(variant->pix_align->org_h) - 1; else mod_y = ffs(variant->pix_align->org_h) - 2; min_w = variant->pix_min->target_rot_dis_w; min_h = variant->pix_min->target_rot_dis_h; } gsc_dbg("mod_x: %d, mod_y: %d, max_w: %d, max_h = %d", mod_x, mod_y, max_w, max_h); /* To check if image size is modified to adjust parameter against hardware abilities */ tmp_w = pix_mp->width; tmp_h = pix_mp->height; v4l_bound_align_image(&pix_mp->width, min_w, max_w, mod_x, &pix_mp->height, min_h, max_h, mod_y, 0); if (tmp_w != pix_mp->width || tmp_h != pix_mp->height) gsc_info("Image size has been modified from %dx%d to %dx%d", tmp_w, tmp_h, pix_mp->width, pix_mp->height); pix_mp->num_planes = fmt->num_planes; if (ctx->gsc_ctrls.csc_eq_mode->val) ctx->gsc_ctrls.csc_eq->val = (pix_mp->width >= 1280) ? 1 : 0; if (ctx->gsc_ctrls.csc_eq->val) /* HD */ pix_mp->colorspace = V4L2_COLORSPACE_REC709; else /* SD */ pix_mp->colorspace = V4L2_COLORSPACE_SMPTE170M; for (i = 0; i < pix_mp->num_planes; ++i) { int bpl = (pix_mp->width * fmt->depth[i]) >> 3; pix_mp->plane_fmt[i].bytesperline = bpl; pix_mp->plane_fmt[i].sizeimage = bpl * pix_mp->height; gsc_dbg("[%d]: bpl: %d, sizeimage: %d", i, bpl, pix_mp->plane_fmt[i].sizeimage); } return 0; }
static int rvin_s_selection(struct file *file, void *fh, struct v4l2_selection *s) { struct rvin_dev *vin = video_drvdata(file); const struct rvin_video_format *fmt; struct v4l2_rect r = s->r; struct v4l2_rect max_rect; struct v4l2_rect min_rect = { .width = 6, .height = 2, }; if (s->type != V4L2_BUF_TYPE_VIDEO_CAPTURE) return -EINVAL; v4l2_rect_set_min_size(&r, &min_rect); switch (s->target) { case V4L2_SEL_TGT_CROP: /* Can't crop outside of source input */ max_rect.top = max_rect.left = 0; max_rect.width = vin->source.width; max_rect.height = vin->source.height; v4l2_rect_map_inside(&r, &max_rect); v4l_bound_align_image(&r.width, 2, vin->source.width, 1, &r.height, 4, vin->source.height, 2, 0); r.top = clamp_t(s32, r.top, 0, vin->source.height - r.height); r.left = clamp_t(s32, r.left, 0, vin->source.width - r.width); vin->crop = s->r = r; vin_dbg(vin, "Cropped %dx%d@%d:%d of %dx%d\n", r.width, r.height, r.left, r.top, vin->source.width, vin->source.height); break; case V4L2_SEL_TGT_COMPOSE: /* Make sure compose rect fits inside output format */ max_rect.top = max_rect.left = 0; max_rect.width = vin->format.width; max_rect.height = vin->format.height; v4l2_rect_map_inside(&r, &max_rect); /* * Composing is done by adding a offset to the buffer address, * the HW wants this address to be aligned to HW_BUFFER_MASK. * Make sure the top and left values meets this requirement. */ while ((r.top * vin->format.bytesperline) & HW_BUFFER_MASK) r.top--; fmt = rvin_format_from_pixel(vin->format.pixelformat); while ((r.left * fmt->bpp) & HW_BUFFER_MASK) r.left--; vin->compose = s->r = r; vin_dbg(vin, "Compose %dx%d@%d:%d in %dx%d\n", r.width, r.height, r.left, r.top, vin->format.width, vin->format.height); break; default: return -EINVAL; } /* HW supports modifying configuration while running */ rvin_crop_scale_comp(vin); return 0; } static int rvin_cropcap(struct file *file, void *priv, struct v4l2_cropcap *crop) { struct rvin_dev *vin = video_drvdata(file); struct v4l2_subdev *sd = vin_to_source(vin); if (crop->type != V4L2_BUF_TYPE_VIDEO_CAPTURE) return -EINVAL; return v4l2_subdev_call(sd, video, g_pixelaspect, &crop->pixelaspect); } static int rvin_enum_input(struct file *file, void *priv, struct v4l2_input *i) { struct rvin_dev *vin = video_drvdata(file); struct v4l2_subdev *sd = vin_to_source(vin); int ret; if (i->index != 0) return -EINVAL; ret = v4l2_subdev_call(sd, video, g_input_status, &i->status); if (ret < 0 && ret != -ENOIOCTLCMD && ret != -ENODEV) return ret; i->type = V4L2_INPUT_TYPE_CAMERA; i->std = vin->vdev.tvnorms; if (v4l2_subdev_has_op(sd, pad, dv_timings_cap)) i->capabilities = V4L2_IN_CAP_DV_TIMINGS; strlcpy(i->name, "Camera", sizeof(i->name)); return 0; } static int rvin_g_input(struct file *file, void *priv, unsigned int *i) { *i = 0; return 0; }
/* The V4L2 specification suggests that the driver corrects the format
 * struct if any of the dimensions is unsupported */
static int vidioc_try_fmt(struct v4l2_format *f, struct mtk_video_fmt *fmt)
{
	struct v4l2_pix_format_mplane *pix_fmt_mp = &f->fmt.pix_mp;
	int i;

	pix_fmt_mp->field = V4L2_FIELD_NONE;

	if (f->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
		pix_fmt_mp->num_planes = 1;
		pix_fmt_mp->plane_fmt[0].bytesperline = 0;
	} else if (f->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) {
		int tmp_w, tmp_h;

		pix_fmt_mp->height = clamp(pix_fmt_mp->height,
					   MTK_VENC_MIN_H, MTK_VENC_MAX_H);
		pix_fmt_mp->width = clamp(pix_fmt_mp->width,
					  MTK_VENC_MIN_W, MTK_VENC_MAX_W);

		/* find the next closest rectangle with width aligned to 16,
		 * height aligned to 32 and size aligned to 64 */
		tmp_w = pix_fmt_mp->width;
		tmp_h = pix_fmt_mp->height;
		v4l_bound_align_image(&pix_fmt_mp->width,
				      MTK_VENC_MIN_W, MTK_VENC_MAX_W, 4,
				      &pix_fmt_mp->height,
				      MTK_VENC_MIN_H, MTK_VENC_MAX_H, 5, 6);

		if (pix_fmt_mp->width < tmp_w &&
		    (pix_fmt_mp->width + 16) <= MTK_VENC_MAX_W)
			pix_fmt_mp->width += 16;
		if (pix_fmt_mp->height < tmp_h &&
		    (pix_fmt_mp->height + 32) <= MTK_VENC_MAX_H)
			pix_fmt_mp->height += 32;

		mtk_v4l2_debug(0, "before resize width=%d, height=%d, after resize width=%d, height=%d, sizeimage=%d %d",
			       tmp_w, tmp_h, pix_fmt_mp->width,
			       pix_fmt_mp->height,
			       pix_fmt_mp->plane_fmt[0].sizeimage,
			       pix_fmt_mp->plane_fmt[1].sizeimage);

		pix_fmt_mp->num_planes = fmt->num_planes;
		pix_fmt_mp->plane_fmt[0].sizeimage =
			pix_fmt_mp->width * pix_fmt_mp->height +
			((ALIGN(pix_fmt_mp->width, 16) * 2) * 16);
		pix_fmt_mp->plane_fmt[0].bytesperline = pix_fmt_mp->width;

		if (pix_fmt_mp->num_planes == 2) {
			pix_fmt_mp->plane_fmt[1].sizeimage =
				(pix_fmt_mp->width * pix_fmt_mp->height) / 2 +
				(ALIGN(pix_fmt_mp->width, 16) * 16);
			pix_fmt_mp->plane_fmt[2].sizeimage = 0;
			pix_fmt_mp->plane_fmt[1].bytesperline =
				pix_fmt_mp->width;
			pix_fmt_mp->plane_fmt[2].bytesperline = 0;
		} else if (pix_fmt_mp->num_planes == 3) {
			pix_fmt_mp->plane_fmt[1].sizeimage =
				pix_fmt_mp->plane_fmt[2].sizeimage =
				(pix_fmt_mp->width * pix_fmt_mp->height) / 4 +
				((ALIGN(pix_fmt_mp->width, 16) / 2) * 16);
			pix_fmt_mp->plane_fmt[1].bytesperline =
				pix_fmt_mp->plane_fmt[2].bytesperline =
				pix_fmt_mp->width / 2;
		}
	}

	for (i = 0; i < pix_fmt_mp->num_planes; i++)
		memset(&(pix_fmt_mp->plane_fmt[i].reserved[0]), 0x0,
		       sizeof(pix_fmt_mp->plane_fmt[0].reserved));

	pix_fmt_mp->flags = 0;
	memset(&pix_fmt_mp->reserved, 0x0, sizeof(pix_fmt_mp->reserved));
	return 0;
}
static struct fimc_fmt *fimc_capture_try_format(struct fimc_ctx *ctx, u32 *width, u32 *height, u32 *code, u32 *fourcc, int pad) { bool rotation = ctx->rotation == 90 || ctx->rotation == 270; struct fimc_dev *fimc = ctx->fimc_dev; struct samsung_fimc_variant *var = fimc->variant; struct fimc_pix_limit *pl = var->pix_limit; struct fimc_frame *dst = &ctx->d_frame; u32 depth, min_w, max_w, min_h, align_h = 3; u32 mask = FMT_FLAGS_CAM; struct fimc_fmt *ffmt; /* Color conversion from/to JPEG is not supported */ if (code && ctx->s_frame.fmt && pad == FIMC_SD_PAD_SOURCE && fimc_fmt_is_jpeg(ctx->s_frame.fmt->color)) *code = V4L2_MBUS_FMT_JPEG_1X8; if (fourcc && *fourcc != V4L2_PIX_FMT_JPEG && pad != FIMC_SD_PAD_SINK) mask |= FMT_FLAGS_M2M; ffmt = fimc_find_format(fourcc, code, mask, 0); if (WARN_ON(!ffmt)) return NULL; if (code) *code = ffmt->mbus_code; if (fourcc) *fourcc = ffmt->fourcc; if (pad == FIMC_SD_PAD_SINK) { max_w = fimc_fmt_is_jpeg(ffmt->color) ? pl->scaler_dis_w : pl->scaler_en_w; /* Apply the camera input interface pixel constraints */ v4l_bound_align_image(width, max_t(u32, *width, 32), max_w, 4, height, max_t(u32, *height, 32), FIMC_CAMIF_MAX_HEIGHT, fimc_fmt_is_jpeg(ffmt->color) ? 3 : 1, 0); return ffmt; } /* Can't scale or crop in transparent (JPEG) transfer mode */ if (fimc_fmt_is_jpeg(ffmt->color)) { *width = ctx->s_frame.f_width; *height = ctx->s_frame.f_height; return ffmt; } /* Apply the scaler and the output DMA constraints */ max_w = rotation ? pl->out_rot_en_w : pl->out_rot_dis_w; min_w = ctx->state & FIMC_DST_CROP ? dst->width : var->min_out_pixsize; min_h = ctx->state & FIMC_DST_CROP ? dst->height : var->min_out_pixsize; if (var->min_vsize_align == 1 && !rotation) align_h = fimc_fmt_is_rgb(ffmt->color) ? 0 : 1; depth = fimc_get_format_depth(ffmt); v4l_bound_align_image(width, min_w, max_w, ffs(var->min_out_pixsize) - 1, height, min_h, FIMC_CAMIF_MAX_HEIGHT, align_h, 64/(ALIGN(depth, 8))); dbg("pad%d: code: 0x%x, %dx%d. dst fmt: %dx%d", pad, code ? *code : 0, *width, *height, dst->f_width, dst->f_height); return ffmt; }
static int rvin_reset_format(struct rvin_dev *vin) { struct v4l2_subdev_format fmt = { .which = V4L2_SUBDEV_FORMAT_ACTIVE, .pad = vin->parallel->source_pad, }; int ret; ret = v4l2_subdev_call(vin_to_source(vin), pad, get_fmt, NULL, &fmt); if (ret) return ret; v4l2_fill_pix_format(&vin->format, &fmt.format); rvin_format_align(vin, &vin->format); vin->source.top = 0; vin->source.left = 0; vin->source.width = vin->format.width; vin->source.height = vin->format.height; vin->crop = vin->source; vin->compose = vin->source; return 0; } static int rvin_try_format(struct rvin_dev *vin, u32 which, struct v4l2_pix_format *pix, struct v4l2_rect *crop, struct v4l2_rect *compose) { struct v4l2_subdev *sd = vin_to_source(vin); struct v4l2_subdev_pad_config *pad_cfg; struct v4l2_subdev_format format = { .which = which, .pad = vin->parallel->source_pad, }; enum v4l2_field field; u32 width, height; int ret; pad_cfg = v4l2_subdev_alloc_pad_config(sd); if (pad_cfg == NULL) return -ENOMEM; if (!rvin_format_from_pixel(pix->pixelformat) || (vin->info->model == RCAR_M1 && pix->pixelformat == V4L2_PIX_FMT_XBGR32)) pix->pixelformat = RVIN_DEFAULT_FORMAT; v4l2_fill_mbus_format(&format.format, pix, vin->mbus_code); /* Allow the video device to override field and to scale */ field = pix->field; width = pix->width; height = pix->height; ret = v4l2_subdev_call(sd, pad, set_fmt, pad_cfg, &format); if (ret < 0 && ret != -ENOIOCTLCMD) goto done; v4l2_fill_pix_format(pix, &format.format); if (crop) { crop->top = 0; crop->left = 0; crop->width = pix->width; crop->height = pix->height; /* * If source is ALTERNATE the driver will use the VIN hardware * to INTERLACE it. The crop height then needs to be doubled. */ if (pix->field == V4L2_FIELD_ALTERNATE) crop->height *= 2; } if (field != V4L2_FIELD_ANY) pix->field = field; pix->width = width; pix->height = height; rvin_format_align(vin, pix); if (compose) { compose->top = 0; compose->left = 0; compose->width = pix->width; compose->height = pix->height; } done: v4l2_subdev_free_pad_config(pad_cfg); return 0; } static int rvin_querycap(struct file *file, void *priv, struct v4l2_capability *cap) { struct rvin_dev *vin = video_drvdata(file); strscpy(cap->driver, KBUILD_MODNAME, sizeof(cap->driver)); strscpy(cap->card, "R_Car_VIN", sizeof(cap->card)); snprintf(cap->bus_info, sizeof(cap->bus_info), "platform:%s", dev_name(vin->dev)); return 0; } static int rvin_try_fmt_vid_cap(struct file *file, void *priv, struct v4l2_format *f) { struct rvin_dev *vin = video_drvdata(file); return rvin_try_format(vin, V4L2_SUBDEV_FORMAT_TRY, &f->fmt.pix, NULL, NULL); } static int rvin_s_fmt_vid_cap(struct file *file, void *priv, struct v4l2_format *f) { struct rvin_dev *vin = video_drvdata(file); struct v4l2_rect crop, compose; int ret; if (vb2_is_busy(&vin->queue)) return -EBUSY; ret = rvin_try_format(vin, V4L2_SUBDEV_FORMAT_ACTIVE, &f->fmt.pix, &crop, &compose); if (ret) return ret; vin->format = f->fmt.pix; vin->crop = crop; vin->compose = compose; vin->source = crop; return 0; } static int rvin_g_fmt_vid_cap(struct file *file, void *priv, struct v4l2_format *f) { struct rvin_dev *vin = video_drvdata(file); f->fmt.pix = vin->format; return 0; } static int rvin_enum_fmt_vid_cap(struct file *file, void *priv, struct v4l2_fmtdesc *f) { if (f->index >= ARRAY_SIZE(rvin_formats)) return -EINVAL; f->pixelformat = rvin_formats[f->index].fourcc; return 0; } static int rvin_g_selection(struct file *file, void *fh, struct v4l2_selection *s) { struct rvin_dev *vin = video_drvdata(file); if (s->type != 
V4L2_BUF_TYPE_VIDEO_CAPTURE) return -EINVAL; switch (s->target) { case V4L2_SEL_TGT_CROP_BOUNDS: case V4L2_SEL_TGT_CROP_DEFAULT: s->r.left = s->r.top = 0; s->r.width = vin->source.width; s->r.height = vin->source.height; break; case V4L2_SEL_TGT_CROP: s->r = vin->crop; break; case V4L2_SEL_TGT_COMPOSE_BOUNDS: case V4L2_SEL_TGT_COMPOSE_DEFAULT: s->r.left = s->r.top = 0; s->r.width = vin->format.width; s->r.height = vin->format.height; break; case V4L2_SEL_TGT_COMPOSE: s->r = vin->compose; break; default: return -EINVAL; } return 0; } static int rvin_s_selection(struct file *file, void *fh, struct v4l2_selection *s) { struct rvin_dev *vin = video_drvdata(file); const struct rvin_video_format *fmt; struct v4l2_rect r = s->r; struct v4l2_rect max_rect; struct v4l2_rect min_rect = { .width = 6, .height = 2, }; if (s->type != V4L2_BUF_TYPE_VIDEO_CAPTURE) return -EINVAL; v4l2_rect_set_min_size(&r, &min_rect); switch (s->target) { case V4L2_SEL_TGT_CROP: /* Can't crop outside of source input */ max_rect.top = max_rect.left = 0; max_rect.width = vin->source.width; max_rect.height = vin->source.height; v4l2_rect_map_inside(&r, &max_rect); v4l_bound_align_image(&r.width, 6, vin->source.width, 0, &r.height, 2, vin->source.height, 0, 0); r.top = clamp_t(s32, r.top, 0, vin->source.height - r.height); r.left = clamp_t(s32, r.left, 0, vin->source.width - r.width); vin->crop = s->r = r; vin_dbg(vin, "Cropped %dx%d@%d:%d of %dx%d\n", r.width, r.height, r.left, r.top, vin->source.width, vin->source.height); break; case V4L2_SEL_TGT_COMPOSE: /* Make sure compose rect fits inside output format */ max_rect.top = max_rect.left = 0; max_rect.width = vin->format.width; max_rect.height = vin->format.height; v4l2_rect_map_inside(&r, &max_rect); /* * Composing is done by adding a offset to the buffer address, * the HW wants this address to be aligned to HW_BUFFER_MASK. * Make sure the top and left values meets this requirement. 
*/ while ((r.top * vin->format.bytesperline) & HW_BUFFER_MASK) r.top--; fmt = rvin_format_from_pixel(vin->format.pixelformat); while ((r.left * fmt->bpp) & HW_BUFFER_MASK) r.left--; vin->compose = s->r = r; vin_dbg(vin, "Compose %dx%d@%d:%d in %dx%d\n", r.width, r.height, r.left, r.top, vin->format.width, vin->format.height); break; default: return -EINVAL; } /* HW supports modifying configuration while running */ rvin_crop_scale_comp(vin); return 0; } static int rvin_g_pixelaspect(struct file *file, void *priv, int type, struct v4l2_fract *f) { struct rvin_dev *vin = video_drvdata(file); struct v4l2_subdev *sd = vin_to_source(vin); if (type != V4L2_BUF_TYPE_VIDEO_CAPTURE) return -EINVAL; return v4l2_subdev_call(sd, video, g_pixelaspect, f); } static int rvin_enum_input(struct file *file, void *priv, struct v4l2_input *i) { struct rvin_dev *vin = video_drvdata(file); struct v4l2_subdev *sd = vin_to_source(vin); int ret; if (i->index != 0) return -EINVAL; ret = v4l2_subdev_call(sd, video, g_input_status, &i->status); if (ret < 0 && ret != -ENOIOCTLCMD && ret != -ENODEV) return ret; i->type = V4L2_INPUT_TYPE_CAMERA; if (v4l2_subdev_has_op(sd, pad, dv_timings_cap)) { i->capabilities = V4L2_IN_CAP_DV_TIMINGS; i->std = 0; } else { i->capabilities = V4L2_IN_CAP_STD; i->std = vin->vdev.tvnorms; } strscpy(i->name, "Camera", sizeof(i->name)); return 0; } static int rvin_g_input(struct file *file, void *priv, unsigned int *i) { *i = 0; return 0; } static int rvin_s_input(struct file *file, void *priv, unsigned int i) { if (i > 0) return -EINVAL; return 0; } static int rvin_querystd(struct file *file, void *priv, v4l2_std_id *a) { struct rvin_dev *vin = video_drvdata(file); struct v4l2_subdev *sd = vin_to_source(vin); return v4l2_subdev_call(sd, video, querystd, a); } static int rvin_s_std(struct file *file, void *priv, v4l2_std_id a) { struct rvin_dev *vin = video_drvdata(file); int ret; ret = v4l2_subdev_call(vin_to_source(vin), video, s_std, a); if (ret < 0) return ret; vin->std = a; /* Changing the standard will change the width/height */ return rvin_reset_format(vin); } static int rvin_g_std(struct file *file, void *priv, v4l2_std_id *a) { struct rvin_dev *vin = video_drvdata(file); if (v4l2_subdev_has_op(vin_to_source(vin), pad, dv_timings_cap)) return -ENOIOCTLCMD; *a = vin->std; return 0; } static int rvin_subscribe_event(struct v4l2_fh *fh, const struct v4l2_event_subscription *sub) { switch (sub->type) { case V4L2_EVENT_SOURCE_CHANGE: return v4l2_event_subscribe(fh, sub, 4, NULL); } return v4l2_ctrl_subscribe_event(fh, sub); } static int rvin_enum_dv_timings(struct file *file, void *priv_fh, struct v4l2_enum_dv_timings *timings) { struct rvin_dev *vin = video_drvdata(file); struct v4l2_subdev *sd = vin_to_source(vin); int ret; if (timings->pad) return -EINVAL; timings->pad = vin->parallel->sink_pad; ret = v4l2_subdev_call(sd, pad, enum_dv_timings, timings); timings->pad = 0; return ret; } static int rvin_s_dv_timings(struct file *file, void *priv_fh, struct v4l2_dv_timings *timings) { struct rvin_dev *vin = video_drvdata(file); struct v4l2_subdev *sd = vin_to_source(vin); int ret; ret = v4l2_subdev_call(sd, video, s_dv_timings, timings); if (ret) return ret; /* Changing the timings will change the width/height */ return rvin_reset_format(vin); } static int rvin_g_dv_timings(struct file *file, void *priv_fh, struct v4l2_dv_timings *timings) { struct rvin_dev *vin = video_drvdata(file); struct v4l2_subdev *sd = vin_to_source(vin); return v4l2_subdev_call(sd, video, g_dv_timings, timings); } 
static int rvin_query_dv_timings(struct file *file, void *priv_fh, struct v4l2_dv_timings *timings) { struct rvin_dev *vin = video_drvdata(file); struct v4l2_subdev *sd = vin_to_source(vin); return v4l2_subdev_call(sd, video, query_dv_timings, timings); } static int rvin_dv_timings_cap(struct file *file, void *priv_fh, struct v4l2_dv_timings_cap *cap) { struct rvin_dev *vin = video_drvdata(file); struct v4l2_subdev *sd = vin_to_source(vin); int ret; if (cap->pad) return -EINVAL; cap->pad = vin->parallel->sink_pad; ret = v4l2_subdev_call(sd, pad, dv_timings_cap, cap); cap->pad = 0; return ret; } static int rvin_g_edid(struct file *file, void *fh, struct v4l2_edid *edid) { struct rvin_dev *vin = video_drvdata(file); struct v4l2_subdev *sd = vin_to_source(vin); int ret; if (edid->pad) return -EINVAL; edid->pad = vin->parallel->sink_pad; ret = v4l2_subdev_call(sd, pad, get_edid, edid); edid->pad = 0; return ret; } static int rvin_s_edid(struct file *file, void *fh, struct v4l2_edid *edid) { struct rvin_dev *vin = video_drvdata(file); struct v4l2_subdev *sd = vin_to_source(vin); int ret; if (edid->pad) return -EINVAL; edid->pad = vin->parallel->sink_pad; ret = v4l2_subdev_call(sd, pad, set_edid, edid); edid->pad = 0; return ret; } static const struct v4l2_ioctl_ops rvin_ioctl_ops = { .vidioc_querycap = rvin_querycap, .vidioc_try_fmt_vid_cap = rvin_try_fmt_vid_cap, .vidioc_g_fmt_vid_cap = rvin_g_fmt_vid_cap, .vidioc_s_fmt_vid_cap = rvin_s_fmt_vid_cap, .vidioc_enum_fmt_vid_cap = rvin_enum_fmt_vid_cap, .vidioc_g_selection = rvin_g_selection, .vidioc_s_selection = rvin_s_selection, .vidioc_g_pixelaspect = rvin_g_pixelaspect, .vidioc_enum_input = rvin_enum_input, .vidioc_g_input = rvin_g_input, .vidioc_s_input = rvin_s_input, .vidioc_dv_timings_cap = rvin_dv_timings_cap, .vidioc_enum_dv_timings = rvin_enum_dv_timings, .vidioc_g_dv_timings = rvin_g_dv_timings, .vidioc_s_dv_timings = rvin_s_dv_timings, .vidioc_query_dv_timings = rvin_query_dv_timings, .vidioc_g_edid = rvin_g_edid, .vidioc_s_edid = rvin_s_edid, .vidioc_querystd = rvin_querystd, .vidioc_g_std = rvin_g_std, .vidioc_s_std = rvin_s_std, .vidioc_reqbufs = vb2_ioctl_reqbufs, .vidioc_create_bufs = vb2_ioctl_create_bufs, .vidioc_querybuf = vb2_ioctl_querybuf, .vidioc_qbuf = vb2_ioctl_qbuf, .vidioc_dqbuf = vb2_ioctl_dqbuf, .vidioc_expbuf = vb2_ioctl_expbuf, .vidioc_prepare_buf = vb2_ioctl_prepare_buf, .vidioc_streamon = vb2_ioctl_streamon, .vidioc_streamoff = vb2_ioctl_streamoff, .vidioc_log_status = v4l2_ctrl_log_status, .vidioc_subscribe_event = rvin_subscribe_event, .vidioc_unsubscribe_event = v4l2_event_unsubscribe, }; /* ----------------------------------------------------------------------------- * V4L2 Media Controller */ static void rvin_mc_try_format(struct rvin_dev *vin, struct v4l2_pix_format *pix) { /* * The V4L2 specification clearly documents the colorspace fields * as being set by drivers for capture devices. Using the values * supplied by userspace thus wouldn't comply with the API. Until * the API is updated force fixed vaules. 
*/ pix->colorspace = RVIN_DEFAULT_COLORSPACE; pix->xfer_func = V4L2_MAP_XFER_FUNC_DEFAULT(pix->colorspace); pix->ycbcr_enc = V4L2_MAP_YCBCR_ENC_DEFAULT(pix->colorspace); pix->quantization = V4L2_MAP_QUANTIZATION_DEFAULT(true, pix->colorspace, pix->ycbcr_enc); rvin_format_align(vin, pix); } static int rvin_mc_try_fmt_vid_cap(struct file *file, void *priv, struct v4l2_format *f) { struct rvin_dev *vin = video_drvdata(file); rvin_mc_try_format(vin, &f->fmt.pix); return 0; } static int rvin_mc_s_fmt_vid_cap(struct file *file, void *priv, struct v4l2_format *f) { struct rvin_dev *vin = video_drvdata(file); if (vb2_is_busy(&vin->queue)) return -EBUSY; rvin_mc_try_format(vin, &f->fmt.pix); vin->format = f->fmt.pix; vin->crop.top = 0; vin->crop.left = 0; vin->crop.width = vin->format.width; vin->crop.height = vin->format.height; vin->compose = vin->crop; return 0; } static int rvin_mc_enum_input(struct file *file, void *priv, struct v4l2_input *i) { if (i->index != 0) return -EINVAL; i->type = V4L2_INPUT_TYPE_CAMERA; strscpy(i->name, "Camera", sizeof(i->name)); return 0; } static const struct v4l2_ioctl_ops rvin_mc_ioctl_ops = { .vidioc_querycap = rvin_querycap, .vidioc_try_fmt_vid_cap = rvin_mc_try_fmt_vid_cap, .vidioc_g_fmt_vid_cap = rvin_g_fmt_vid_cap, .vidioc_s_fmt_vid_cap = rvin_mc_s_fmt_vid_cap, .vidioc_enum_fmt_vid_cap = rvin_enum_fmt_vid_cap, .vidioc_enum_input = rvin_mc_enum_input, .vidioc_g_input = rvin_g_input, .vidioc_s_input = rvin_s_input, .vidioc_reqbufs = vb2_ioctl_reqbufs, .vidioc_create_bufs = vb2_ioctl_create_bufs, .vidioc_querybuf = vb2_ioctl_querybuf, .vidioc_qbuf = vb2_ioctl_qbuf, .vidioc_dqbuf = vb2_ioctl_dqbuf, .vidioc_expbuf = vb2_ioctl_expbuf, .vidioc_prepare_buf = vb2_ioctl_prepare_buf, .vidioc_streamon = vb2_ioctl_streamon, .vidioc_streamoff = vb2_ioctl_streamoff, .vidioc_log_status = v4l2_ctrl_log_status, .vidioc_subscribe_event = rvin_subscribe_event, .vidioc_unsubscribe_event = v4l2_event_unsubscribe, }; /* ----------------------------------------------------------------------------- * File Operations */ static int rvin_power_on(struct rvin_dev *vin) { int ret; struct v4l2_subdev *sd = vin_to_source(vin); pm_runtime_get_sync(vin->v4l2_dev.dev); ret = v4l2_subdev_call(sd, core, s_power, 1); if (ret < 0 && ret != -ENOIOCTLCMD && ret != -ENODEV) return ret; return 0; }
static void fimc_capture_try_crop(struct fimc_ctx *ctx, struct v4l2_rect *r, int pad) { bool rotate = ctx->rotation == 90 || ctx->rotation == 270; struct fimc_dev *fimc = ctx->fimc_dev; struct samsung_fimc_variant *var = fimc->variant; struct fimc_pix_limit *pl = var->pix_limit; struct fimc_frame *sink = &ctx->s_frame; u32 max_w, max_h, min_w = 0, min_h = 0, min_sz; u32 align_sz = 0, align_h = 4; u32 max_sc_h, max_sc_v; /* In JPEG transparent transfer mode cropping is not supported */ if (fimc_fmt_is_jpeg(ctx->d_frame.fmt->color)) { r->width = sink->f_width; r->height = sink->f_height; r->left = r->top = 0; return; } if (pad == FIMC_SD_PAD_SOURCE) { if (ctx->rotation != 90 && ctx->rotation != 270) align_h = 1; max_sc_h = min(SCALER_MAX_HRATIO, 1 << (ffs(sink->width) - 3)); max_sc_v = min(SCALER_MAX_VRATIO, 1 << (ffs(sink->height) - 1)); min_sz = var->min_out_pixsize; } else { u32 depth = fimc_get_format_depth(sink->fmt); align_sz = 64/ALIGN(depth, 8); min_sz = var->min_inp_pixsize; min_w = min_h = min_sz; max_sc_h = max_sc_v = 1; } /* * For the crop rectangle at source pad the following constraints * must be met: * - it must fit in the sink pad format rectangle (f_width/f_height); * - maximum downscaling ratio is 64; * - maximum crop size depends if the rotator is used or not; * - the sink pad format width/height must be 4 multiple of the * prescaler ratios determined by sink pad size and source pad crop, * the prescaler ratio is returned by fimc_get_scaler_factor(). */ max_w = min_t(u32, rotate ? pl->out_rot_en_w : pl->out_rot_dis_w, rotate ? sink->f_height : sink->f_width); max_h = min_t(u32, FIMC_CAMIF_MAX_HEIGHT, sink->f_height); if (pad == FIMC_SD_PAD_SOURCE) { min_w = min_t(u32, max_w, sink->f_width / max_sc_h); min_h = min_t(u32, max_h, sink->f_height / max_sc_v); if (rotate) { swap(max_sc_h, max_sc_v); swap(min_w, min_h); } } v4l_bound_align_image(&r->width, min_w, max_w, ffs(min_sz) - 1, &r->height, min_h, max_h, align_h, align_sz); /* Adjust left/top if cropping rectangle is out of bounds */ r->left = clamp_t(u32, r->left, 0, sink->f_width - r->width); r->top = clamp_t(u32, r->top, 0, sink->f_height - r->height); r->left = round_down(r->left, var->hor_offs_align); dbg("pad%d: (%d,%d)/%dx%d, sink fmt: %dx%d", pad, r->left, r->top, r->width, r->height, sink->f_width, sink->f_height); }