Example #1
0
static int vidioc_enum_framesizes(struct file *file, void *priv,
				struct v4l2_frmsizeenum *fsize)
{
	int i = 0;
	struct mtk_vcodec_ctx *ctx = fh_to_ctx(priv);

	if (fsize->index != 0)
		return -EINVAL;

	for (i = 0; i < NUM_SUPPORTED_FRAMESIZE; ++i) {
		if (fsize->pixel_format != mtk_vdec_framesizes[i].fourcc)
			continue;

		fsize->type = V4L2_FRMSIZE_TYPE_STEPWISE;
		fsize->stepwise = mtk_vdec_framesizes[i].stepwise;
		if (!(ctx->dev->dec_capability &
				VCODEC_CAPABILITY_4K_DISABLED)) {
			mtk_v4l2_debug(3, "4K is enabled");
			fsize->stepwise.max_width =
					VCODEC_DEC_4K_CODED_WIDTH;
			fsize->stepwise.max_height =
					VCODEC_DEC_4K_CODED_HEIGHT;
		}
		mtk_v4l2_debug(1, "%x, %d %d %d %d %d %d",
				ctx->dev->dec_capability,
				fsize->stepwise.min_width,
				fsize->stepwise.max_width,
				fsize->stepwise.step_width,
				fsize->stepwise.min_height,
				fsize->stepwise.max_height,
				fsize->stepwise.step_height);
		return 0;
	}

	return -EINVAL;
}
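A minimal userspace sketch of how a client might exercise this handler through VIDIOC_ENUM_FRAMESIZES; the device path /dev/video0 and the H.264 fourcc are assumptions, not taken from the listing above.

/* Userspace sketch: enumerate the stepwise frame size range for H.264. */
#include <fcntl.h>
#include <stdio.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

int main(void)
{
	struct v4l2_frmsizeenum fsize = {
		.index = 0,
		.pixel_format = V4L2_PIX_FMT_H264,	/* assumed input codec */
	};
	int fd = open("/dev/video0", O_RDWR);		/* hypothetical node */

	if (fd < 0)
		return 1;

	/* The driver fills in a stepwise range for the matching fourcc. */
	if (ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &fsize) == 0 &&
	    fsize.type == V4L2_FRMSIZE_TYPE_STEPWISE)
		printf("max %ux%u, step %ux%u\n",
		       fsize.stepwise.max_width, fsize.stepwise.max_height,
		       fsize.stepwise.step_width, fsize.stepwise.step_height);
	return 0;
}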
Example #2
0
static int gsc_m2m_g_selection(struct file *file, void *fh,
			struct v4l2_selection *s)
{
	struct gsc_frame *frame;
	struct gsc_ctx *ctx = fh_to_ctx(fh);

	if ((s->type != V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) &&
	    (s->type != V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE))
		return -EINVAL;

	frame = ctx_get_frame(ctx, s->type);
	if (IS_ERR(frame))
		return PTR_ERR(frame);

	switch (s->target) {
	case V4L2_SEL_TGT_COMPOSE_DEFAULT:
	case V4L2_SEL_TGT_COMPOSE_BOUNDS:
	case V4L2_SEL_TGT_CROP_BOUNDS:
	case V4L2_SEL_TGT_CROP_DEFAULT:
		s->r.left = 0;
		s->r.top = 0;
		s->r.width = frame->f_width;
		s->r.height = frame->f_height;
		return 0;

	case V4L2_SEL_TGT_COMPOSE:
	case V4L2_SEL_TGT_CROP:
		s->r.left = frame->crop.left;
		s->r.top = frame->crop.top;
		s->r.width = frame->crop.width;
		s->r.height = frame->crop.height;
		return 0;
	}

	return -EINVAL;
}
Example #3
0
/* Dequeue a buffer */
static int vidioc_dqbuf(struct file *file, void *priv, struct v4l2_buffer *buf)
{
	const struct v4l2_event ev = {
		.type = V4L2_EVENT_EOS
	};
	struct s5p_mfc_ctx *ctx = fh_to_ctx(priv);
	int ret;

	if (ctx->state == MFCINST_ERROR) {
		mfc_err("Call on DQBUF after unrecoverable error\n");
		return -EIO;
	}
	if (buf->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)
		ret = vb2_dqbuf(&ctx->vq_src, buf, file->f_flags & O_NONBLOCK);
	else if (buf->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
		ret = vb2_dqbuf(&ctx->vq_dst, buf, file->f_flags & O_NONBLOCK);
		if (ret == 0 && ctx->state == MFCINST_FINISHED &&
				list_empty(&ctx->vq_dst.done_list))
			v4l2_event_queue_fh(&ctx->fh, &ev);
	} else {
		ret = -EINVAL;
	}
	return ret;
}
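A minimal userspace sketch of the other side of this handler: subscribing to the V4L2_EVENT_EOS event it queues and dequeuing one multiplanar capture buffer. The fd is assumed to be an already-configured, streaming decoder node.

/* Userspace sketch: watch for EOS while draining decoded capture buffers. */
#include <string.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

static int drain_one(int fd)
{
	struct v4l2_event_subscription sub;
	struct v4l2_plane planes[VIDEO_MAX_PLANES];
	struct v4l2_buffer buf;

	memset(&sub, 0, sizeof(sub));
	sub.type = V4L2_EVENT_EOS;
	if (ioctl(fd, VIDIOC_SUBSCRIBE_EVENT, &sub))
		return -1;

	memset(&buf, 0, sizeof(buf));
	memset(planes, 0, sizeof(planes));
	buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
	buf.memory = V4L2_MEMORY_MMAP;
	buf.m.planes = planes;
	buf.length = VIDEO_MAX_PLANES;

	/* Blocks until a decoded frame is ready (fd opened without O_NONBLOCK). */
	return ioctl(fd, VIDIOC_DQBUF, &buf);
}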
Example #4
0
static int gsc_m2m_expbuf(struct file *file, void *fh,
				struct v4l2_exportbuffer *eb)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);
	return v4l2_m2m_expbuf(file, ctx->m2m_ctx, eb);
}
Example #5
0
static int vidioc_venc_s_fmt_out(struct file *file, void *priv,
			     struct v4l2_format *f)
{
	struct mtk_vcodec_ctx *ctx = fh_to_ctx(priv);
	struct vb2_queue *vq;
	struct mtk_q_data *q_data;
	int ret, i;
	struct mtk_video_fmt *fmt;
	struct v4l2_pix_format_mplane *pix_fmt_mp = &f->fmt.pix_mp;

	vq = v4l2_m2m_get_vq(ctx->m2m_ctx, f->type);
	if (!vq) {
		mtk_v4l2_err("fail to get vq");
		return -EINVAL;
	}

	if (vb2_is_busy(vq)) {
		mtk_v4l2_err("queue busy");
		return -EBUSY;
	}

	q_data = mtk_venc_get_q_data(ctx, f->type);
	if (!q_data) {
		mtk_v4l2_err("fail to get q data");
		return -EINVAL;
	}

	fmt = mtk_venc_find_format(f);
	if (!fmt) {
		f->fmt.pix.pixelformat = mtk_video_formats[OUT_FMT_IDX].fourcc;
		fmt = mtk_venc_find_format(f);
	}

	pix_fmt_mp->height = clamp(pix_fmt_mp->height,
				MTK_VENC_MIN_H,
				MTK_VENC_MAX_H);
	pix_fmt_mp->width = clamp(pix_fmt_mp->width,
				MTK_VENC_MIN_W,
				MTK_VENC_MAX_W);

	q_data->visible_width = f->fmt.pix_mp.width;
	q_data->visible_height = f->fmt.pix_mp.height;
	q_data->fmt = fmt;
	ret = vidioc_try_fmt(f, q_data->fmt);
	if (ret)
		return ret;

	q_data->coded_width = f->fmt.pix_mp.width;
	q_data->coded_height = f->fmt.pix_mp.height;

	q_data->field = f->fmt.pix_mp.field;
	ctx->colorspace = f->fmt.pix_mp.colorspace;
	ctx->ycbcr_enc = f->fmt.pix_mp.ycbcr_enc;
	ctx->quantization = f->fmt.pix_mp.quantization;
	ctx->xfer_func = f->fmt.pix_mp.xfer_func;

	for (i = 0; i < f->fmt.pix_mp.num_planes; i++) {
		struct v4l2_plane_pix_format *plane_fmt;

		plane_fmt = &f->fmt.pix_mp.plane_fmt[i];
		q_data->bytesperline[i] = plane_fmt->bytesperline;
		q_data->sizeimage[i] = plane_fmt->sizeimage;
	}

	return 0;
}
Example #6
0
static int vidioc_venc_s_fmt_cap(struct file *file, void *priv,
			     struct v4l2_format *f)
{
	struct mtk_vcodec_ctx *ctx = fh_to_ctx(priv);
	struct vb2_queue *vq;
	struct mtk_q_data *q_data;
	int i, ret;
	struct mtk_video_fmt *fmt;

	vq = v4l2_m2m_get_vq(ctx->m2m_ctx, f->type);
	if (!vq) {
		mtk_v4l2_err("fail to get vq");
		return -EINVAL;
	}

	if (vb2_is_busy(vq)) {
		mtk_v4l2_err("queue busy");
		return -EBUSY;
	}

	q_data = mtk_venc_get_q_data(ctx, f->type);
	if (!q_data) {
		mtk_v4l2_err("fail to get q data");
		return -EINVAL;
	}

	fmt = mtk_venc_find_format(f);
	if (!fmt) {
		f->fmt.pix.pixelformat = mtk_video_formats[CAP_FMT_IDX].fourcc;
		fmt = mtk_venc_find_format(f);
	}

	q_data->fmt = fmt;
	ret = vidioc_try_fmt(f, q_data->fmt);
	if (ret)
		return ret;

	q_data->coded_width = f->fmt.pix_mp.width;
	q_data->coded_height = f->fmt.pix_mp.height;
	q_data->field = f->fmt.pix_mp.field;

	for (i = 0; i < f->fmt.pix_mp.num_planes; i++) {
		struct v4l2_plane_pix_format	*plane_fmt;

		plane_fmt = &f->fmt.pix_mp.plane_fmt[i];
		q_data->bytesperline[i]	= plane_fmt->bytesperline;
		q_data->sizeimage[i] = plane_fmt->sizeimage;
	}

	if (ctx->state == MTK_STATE_FREE) {
		ret = venc_if_init(ctx, q_data->fmt->fourcc);
		if (ret) {
			mtk_v4l2_err("venc_if_init failed=%d, codec type=%x",
					ret, q_data->fmt->fourcc);
			return -EBUSY;
		}
		ctx->state = MTK_STATE_INIT;
	}

	return 0;
}
Example #7
0
static int fimc_m2m_reqbufs(struct file *file, void *fh,
                            struct v4l2_requestbuffers *reqbufs)
{
    struct fimc_ctx *ctx = fh_to_ctx(fh);
    return v4l2_m2m_reqbufs(file, ctx->m2m_ctx, reqbufs);
}
Example #8
0
static int fimc_m2m_try_fmt_mplane(struct file *file, void *fh,
                                   struct v4l2_format *f)
{
    struct fimc_ctx *ctx = fh_to_ctx(fh);
    return fimc_try_fmt_mplane(ctx, f);
}
Example #9
0
/* Set format */
static int vidioc_s_fmt(struct file *file, void *priv, struct v4l2_format *f)
{
	struct s5p_mfc_dev *dev = video_drvdata(file);
	struct s5p_mfc_ctx *ctx = fh_to_ctx(priv);
	int ret = 0;
	struct s5p_mfc_fmt *fmt;
	struct v4l2_pix_format_mplane *pix_mp;

	mfc_debug_enter();
	ret = vidioc_try_fmt(file, priv, f);
	pix_mp = &f->fmt.pix_mp;
	if (ret)
		return ret;
	if (ctx->vq_src.streaming || ctx->vq_dst.streaming) {
		v4l2_err(&dev->v4l2_dev, "%s queue busy\n", __func__);
		ret = -EBUSY;
		goto out;
	}
	if (f->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
		fmt = find_format(f, MFC_FMT_RAW);
		if (!fmt) {
			mfc_err("Unsupported format for source.\n");
			return -EINVAL;
		}
		if (!IS_MFCV6(dev) && (fmt->fourcc != V4L2_PIX_FMT_NV12MT)) {
			mfc_err("Not supported format.\n");
			return -EINVAL;
		} else if (IS_MFCV6(dev) &&
				(fmt->fourcc == V4L2_PIX_FMT_NV12MT)) {
			mfc_err("Not supported format.\n");
			return -EINVAL;
		}
		ctx->dst_fmt = fmt;
		mfc_debug_leave();
		return ret;
	} else if (f->type != V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) {
		mfc_err("Wrong type error for S_FMT : %d", f->type);
		return -EINVAL;
	}
	fmt = find_format(f, MFC_FMT_DEC);
	if (!fmt || fmt->codec_mode == S5P_MFC_CODEC_NONE) {
		mfc_err("Unknown codec\n");
		ret = -EINVAL;
		goto out;
	}
	if (fmt->type != MFC_FMT_DEC) {
		mfc_err("Wrong format selected, you should choose "
					"format for decoding\n");
		ret = -EINVAL;
		goto out;
	}
	if (!IS_MFCV6(dev) && (fmt->fourcc == V4L2_PIX_FMT_VP8)) {
		mfc_err("Not supported format.\n");
		return -EINVAL;
	}
	ctx->src_fmt = fmt;
	ctx->codec_mode = fmt->codec_mode;
	mfc_debug(2, "The codec number is: %d\n", ctx->codec_mode);
	pix_mp->height = 0;
	pix_mp->width = 0;
	if (pix_mp->plane_fmt[0].sizeimage)
		ctx->dec_src_buf_size = pix_mp->plane_fmt[0].sizeimage;
	else
		pix_mp->plane_fmt[0].sizeimage = ctx->dec_src_buf_size =
								DEF_CPB_SIZE;
	pix_mp->plane_fmt[0].bytesperline = 0;
	ctx->state = MFCINST_INIT;
out:
	mfc_debug_leave();
	return ret;
}
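A minimal userspace sketch of the VIDIOC_S_FMT call on the OUTPUT queue that reaches the codec-selection path above; the fd and the 1 MiB bitstream buffer size are assumptions.

/* Userspace sketch: select H.264 as the compressed input format. */
#include <string.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

static int set_output_format(int fd)
{
	struct v4l2_format fmt;

	memset(&fmt, 0, sizeof(fmt));
	fmt.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
	fmt.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_H264;
	fmt.fmt.pix_mp.num_planes = 1;
	/* Assumed 1 MiB CPB; with 0 the handler falls back to DEF_CPB_SIZE. */
	fmt.fmt.pix_mp.plane_fmt[0].sizeimage = 1024 * 1024;

	return ioctl(fd, VIDIOC_S_FMT, &fmt);
}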
Example #10
0
static int gsc_m2m_s_selection(struct file *file, void *fh,
				struct v4l2_selection *s)
{
	struct gsc_frame *frame;
	struct gsc_ctx *ctx = fh_to_ctx(fh);
	struct gsc_variant *variant = ctx->gsc_dev->variant;
	struct v4l2_selection sel = *s;
	int ret;

	if ((s->type != V4L2_BUF_TYPE_VIDEO_CAPTURE) &&
	    (s->type != V4L2_BUF_TYPE_VIDEO_OUTPUT))
		return -EINVAL;

	ret = gsc_try_selection(ctx, &sel);
	if (ret)
		return ret;

	if (s->flags & V4L2_SEL_FLAG_LE &&
	    !is_rectangle_enclosed(&sel.r, &s->r))
		return -ERANGE;

	if (s->flags & V4L2_SEL_FLAG_GE &&
	    !is_rectangle_enclosed(&s->r, &sel.r))
		return -ERANGE;

	s->r = sel.r;

	switch (s->target) {
	case V4L2_SEL_TGT_COMPOSE_BOUNDS:
	case V4L2_SEL_TGT_COMPOSE_DEFAULT:
	case V4L2_SEL_TGT_COMPOSE:
		frame = &ctx->s_frame;
		break;

	case V4L2_SEL_TGT_CROP_BOUNDS:
	case V4L2_SEL_TGT_CROP:
	case V4L2_SEL_TGT_CROP_DEFAULT:
		frame = &ctx->d_frame;
		break;

	default:
		return -EINVAL;
	}

	/* Check to see if scaling ratio is within supported range */
	if (gsc_ctx_state_is_set(GSC_DST_FMT | GSC_SRC_FMT, ctx)) {
		if (s->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) {
			ret = gsc_check_scaler_ratio(variant, sel.r.width,
				sel.r.height, ctx->d_frame.crop.width,
				ctx->d_frame.crop.height,
				ctx->gsc_ctrls.rotate->val, ctx->out_path);
		} else {
			ret = gsc_check_scaler_ratio(variant,
				ctx->s_frame.crop.width,
				ctx->s_frame.crop.height, sel.r.width,
				sel.r.height, ctx->gsc_ctrls.rotate->val,
				ctx->out_path);
		}

		if (ret) {
			pr_err("Out of scaler range");
			return -EINVAL;
		}
	}

	frame->crop = sel.r;

	gsc_ctx_state_lock_set(GSC_PARAMS, ctx);
	return 0;
}
Example #11
0
static int bdisp_s_selection(struct file *file, void *fh,
			     struct v4l2_selection *s)
{
	struct bdisp_frame *frame;
	struct bdisp_ctx *ctx = fh_to_ctx(fh);
	struct v4l2_rect *in, out;
	bool valid = false;

	if ((s->type == V4L2_BUF_TYPE_VIDEO_OUTPUT) &&
	    (s->target == V4L2_SEL_TGT_CROP))
		valid = true;

	if ((s->type == V4L2_BUF_TYPE_VIDEO_CAPTURE) &&
	    (s->target == V4L2_SEL_TGT_COMPOSE))
		valid = true;

	if (!valid) {
		dev_err(ctx->bdisp_dev->dev, "Invalid type / target\n");
		return -EINVAL;
	}

	frame = ctx_get_frame(ctx, s->type);
	if (IS_ERR(frame)) {
		dev_err(ctx->bdisp_dev->dev, "Invalid frame (%p)\n", frame);
		return PTR_ERR(frame);
	}

	in = &s->r;
	out = *in;

	/* Align and check origin */
	out.left = ALIGN(in->left, frame->fmt->w_align);
	out.top = ALIGN(in->top, frame->fmt->h_align);

	if ((out.left < 0) || (out.left >= frame->width) ||
	    (out.top < 0) || (out.top >= frame->height)) {
		dev_err(ctx->bdisp_dev->dev,
			"Invalid crop: %dx%d@(%d,%d) vs frame: %dx%d\n",
			out.width, out.height, out.left, out.top,
			frame->width, frame->height);
		return -EINVAL;
	}

	/* Align and check size */
	out.width = ALIGN(in->width, frame->fmt->w_align);
	out.height = ALIGN(in->height, frame->fmt->w_align);

	if (((out.left + out.width) > frame->width) ||
	    ((out.top + out.height) > frame->height)) {
		dev_err(ctx->bdisp_dev->dev,
			"Invalid crop: %dx%d@(%d,%d) vs frame: %dx%d\n",
			out.width, out.height, out.left, out.top,
			frame->width, frame->height);
		return -EINVAL;
	}

	/* Check the adjustment constraint flags */
	if (s->flags & V4L2_SEL_FLAG_LE && !is_rect_enclosed(&out, in))
		return -ERANGE;

	if (s->flags & V4L2_SEL_FLAG_GE && !is_rect_enclosed(in, &out))
		return -ERANGE;

	if ((out.left != in->left) || (out.top != in->top) ||
	    (out.width != in->width) || (out.height != in->height)) {
		dev_dbg(ctx->bdisp_dev->dev,
			"%s crop updated: %dx%d@(%d,%d) -> %dx%d@(%d,%d)\n",
			__func__, in->width, in->height, in->left, in->top,
			out.width, out.height, out.left, out.top);
		*in = out;
	}

	frame->crop = out;

	bdisp_ctx_state_lock_set(BDISP_PARAMS, ctx);

	return 0;
}
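A minimal userspace sketch of a VIDIOC_S_SELECTION call using V4L2_SEL_FLAG_LE, the constraint flag this handler checks after aligning the rectangle; the fd and the 640x480 crop are assumptions.

/* Userspace sketch: request a crop that is only allowed to shrink. */
#include <string.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

static int set_crop(int fd)
{
	struct v4l2_selection sel;

	memset(&sel, 0, sizeof(sel));
	sel.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
	sel.target = V4L2_SEL_TGT_CROP;
	sel.flags = V4L2_SEL_FLAG_LE;	/* aligned result must stay inside the request */
	sel.r.left = 0;
	sel.r.top = 0;
	sel.r.width = 640;
	sel.r.height = 480;

	/* Fails with ERANGE if alignment would have to grow the rectangle. */
	return ioctl(fd, VIDIOC_S_SELECTION, &sel);
}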
Example #12
0
static int bdisp_g_selection(struct file *file, void *fh,
			     struct v4l2_selection *s)
{
	struct bdisp_frame *frame;
	struct bdisp_ctx *ctx = fh_to_ctx(fh);

	frame = ctx_get_frame(ctx, s->type);
	if (IS_ERR(frame)) {
		dev_err(ctx->bdisp_dev->dev, "Invalid frame (%p)\n", frame);
		return PTR_ERR(frame);
	}

	switch (s->type) {
	case V4L2_BUF_TYPE_VIDEO_OUTPUT:
		switch (s->target) {
		case V4L2_SEL_TGT_CROP:
			/* cropped frame */
			s->r = frame->crop;
			break;
		case V4L2_SEL_TGT_CROP_DEFAULT:
		case V4L2_SEL_TGT_CROP_BOUNDS:
			/* complete frame */
			s->r.left = 0;
			s->r.top = 0;
			s->r.width = frame->width;
			s->r.height = frame->height;
			break;
		default:
			dev_err(ctx->bdisp_dev->dev, "Invalid target\n");
			return -EINVAL;
		}
		break;

	case V4L2_BUF_TYPE_VIDEO_CAPTURE:
		switch (s->target) {
		case V4L2_SEL_TGT_COMPOSE:
		case V4L2_SEL_TGT_COMPOSE_PADDED:
			/* composed (cropped) frame */
			s->r = frame->crop;
			break;
		case V4L2_SEL_TGT_COMPOSE_DEFAULT:
		case V4L2_SEL_TGT_COMPOSE_BOUNDS:
			/* complete frame */
			s->r.left = 0;
			s->r.top = 0;
			s->r.width = frame->width;
			s->r.height = frame->height;
			break;
		default:
			dev_err(ctx->bdisp_dev->dev, "Invalid target\n");
			return -EINVAL;
		}
		break;

	default:
		dev_err(ctx->bdisp_dev->dev, "Invalid type\n");
		return -EINVAL;
	}

	return 0;
}
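A minimal userspace sketch of the VIDIOC_G_SELECTION query this handler answers for the compose rectangle on the capture side; the fd is assumed.

/* Userspace sketch: read back the current compose rectangle. */
#include <string.h>
#include <stdio.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

static int get_compose(int fd)
{
	struct v4l2_selection sel;

	memset(&sel, 0, sizeof(sel));
	sel.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	sel.target = V4L2_SEL_TGT_COMPOSE;

	if (ioctl(fd, VIDIOC_G_SELECTION, &sel))
		return -1;

	printf("compose %ux%u@(%d,%d)\n", sel.r.width, sel.r.height,
	       sel.r.left, sel.r.top);
	return 0;
}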
Example #13
0
static int vidioc_vdec_g_fmt(struct file *file, void *priv,
			     struct v4l2_format *f)
{
	struct mtk_vcodec_ctx *ctx = fh_to_ctx(priv);
	struct v4l2_pix_format_mplane *pix_mp = &f->fmt.pix_mp;
	struct vb2_queue *vq;
	struct mtk_q_data *q_data;

	vq = v4l2_m2m_get_vq(ctx->m2m_ctx, f->type);
	if (!vq) {
		mtk_v4l2_err("no vb2 queue for type=%d", f->type);
		return -EINVAL;
	}

	q_data = mtk_vdec_get_q_data(ctx, f->type);

	pix_mp->field = V4L2_FIELD_NONE;
	pix_mp->colorspace = ctx->colorspace;
	pix_mp->ycbcr_enc = ctx->ycbcr_enc;
	pix_mp->quantization = ctx->quantization;
	pix_mp->xfer_func = ctx->xfer_func;

	if ((f->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) &&
	    (ctx->state >= MTK_STATE_HEADER)) {
		/* Until STREAMOFF is called on the CAPTURE queue
		 * (acknowledging the event), the driver operates as if
		 * the resolution hasn't changed yet.
		 * So we just return the current picinfo here and update
		 * it in the stop_streaming hook.
		 */
		q_data->sizeimage[0] = ctx->picinfo.y_bs_sz +
					ctx->picinfo.y_len_sz;
		q_data->sizeimage[1] = ctx->picinfo.c_bs_sz +
					ctx->picinfo.c_len_sz;
		q_data->bytesperline[0] = ctx->last_decoded_picinfo.buf_w;
		q_data->bytesperline[1] = ctx->last_decoded_picinfo.buf_w;
		q_data->coded_width = ctx->picinfo.buf_w;
		q_data->coded_height = ctx->picinfo.buf_h;

		/*
		 * Width and height are set to the dimensions
		 * of the movie, the buffer is bigger and
		 * further processing stages should crop to this
		 * rectangle.
		 */
		pix_mp->width = q_data->coded_width;
		pix_mp->height = q_data->coded_height;

		/*
		 * Set pixelformat to the format in which the MTK vcodec
		 * outputs the decoded frame.
		 */
		pix_mp->num_planes = q_data->fmt->num_planes;
		pix_mp->pixelformat = q_data->fmt->fourcc;
		pix_mp->plane_fmt[0].bytesperline = q_data->bytesperline[0];
		pix_mp->plane_fmt[0].sizeimage = q_data->sizeimage[0];
		pix_mp->plane_fmt[1].bytesperline = q_data->bytesperline[1];
		pix_mp->plane_fmt[1].sizeimage = q_data->sizeimage[1];

	} else if (f->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) {
		/*
		 * This is run on the OUTPUT queue. The buffer contains a
		 * compressed bitstream, so width and height have no real
		 * meaning; assign values anyway to pass the
		 * v4l2-compliance test.
		 */
		pix_mp->width = q_data->visible_width;
		pix_mp->height = q_data->visible_height;
		pix_mp->plane_fmt[0].bytesperline = q_data->bytesperline[0];
		pix_mp->plane_fmt[0].sizeimage = q_data->sizeimage[0];
		pix_mp->pixelformat = q_data->fmt->fourcc;
		pix_mp->num_planes = q_data->fmt->num_planes;
	} else {
		pix_mp->width = q_data->coded_width;
		pix_mp->height = q_data->coded_height;
		pix_mp->num_planes = q_data->fmt->num_planes;
		pix_mp->pixelformat = q_data->fmt->fourcc;
		pix_mp->plane_fmt[0].bytesperline = q_data->bytesperline[0];
		pix_mp->plane_fmt[0].sizeimage = q_data->sizeimage[0];
		pix_mp->plane_fmt[1].bytesperline = q_data->bytesperline[1];
		pix_mp->plane_fmt[1].sizeimage = q_data->sizeimage[1];

		mtk_v4l2_debug(1, "[%d] type=%d state=%d Format information could not be read, not ready yet!",
				ctx->id, f->type, ctx->state);
	}

	return 0;
}
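A minimal userspace sketch of the VIDIOC_G_FMT query on the multiplanar capture queue that this handler serves once the stream header has been parsed; the fd is assumed.

/* Userspace sketch: query the decoded-frame format on the capture queue. */
#include <string.h>
#include <stdio.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

static int get_capture_format(int fd)
{
	struct v4l2_format fmt;

	memset(&fmt, 0, sizeof(fmt));
	fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;

	if (ioctl(fd, VIDIOC_G_FMT, &fmt))
		return -1;

	printf("%ux%u, %u plane(s), stride %u\n",
	       fmt.fmt.pix_mp.width, fmt.fmt.pix_mp.height,
	       (unsigned int)fmt.fmt.pix_mp.num_planes,
	       fmt.fmt.pix_mp.plane_fmt[0].bytesperline);
	return 0;
}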
Example #14
0
static int vidioc_vdec_s_fmt(struct file *file, void *priv,
			     struct v4l2_format *f)
{
	struct mtk_vcodec_ctx *ctx = fh_to_ctx(priv);
	struct v4l2_pix_format_mplane *pix_mp;
	struct mtk_q_data *q_data;
	int ret = 0;
	struct mtk_video_fmt *fmt;

	mtk_v4l2_debug(3, "[%d]", ctx->id);

	q_data = mtk_vdec_get_q_data(ctx, f->type);
	if (!q_data)
		return -EINVAL;

	pix_mp = &f->fmt.pix_mp;
	if ((f->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) &&
	    vb2_is_busy(&ctx->m2m_ctx->out_q_ctx.q)) {
		mtk_v4l2_err("out_q_ctx buffers already requested");
		ret = -EBUSY;
	}

	if ((f->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) &&
	    vb2_is_busy(&ctx->m2m_ctx->cap_q_ctx.q)) {
		mtk_v4l2_err("cap_q_ctx buffers already requested");
		ret = -EBUSY;
	}

	fmt = mtk_vdec_find_format(f);
	if (fmt == NULL) {
		if (f->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) {
			f->fmt.pix.pixelformat =
				mtk_video_formats[OUT_FMT_IDX].fourcc;
			fmt = mtk_vdec_find_format(f);
		} else if (f->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
			f->fmt.pix.pixelformat =
				mtk_video_formats[CAP_FMT_IDX].fourcc;
			fmt = mtk_vdec_find_format(f);
		}
	}

	q_data->fmt = fmt;
	vidioc_try_fmt(f, q_data->fmt);
	if (f->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) {
		q_data->sizeimage[0] = pix_mp->plane_fmt[0].sizeimage;
		q_data->coded_width = pix_mp->width;
		q_data->coded_height = pix_mp->height;

		ctx->colorspace = f->fmt.pix_mp.colorspace;
		ctx->ycbcr_enc = f->fmt.pix_mp.ycbcr_enc;
		ctx->quantization = f->fmt.pix_mp.quantization;
		ctx->xfer_func = f->fmt.pix_mp.xfer_func;

		if (ctx->state == MTK_STATE_FREE) {
			ret = vdec_if_init(ctx, q_data->fmt->fourcc);
			if (ret) {
				mtk_v4l2_err("[%d]: vdec_if_init() fail ret=%d",
					ctx->id, ret);
				return -EINVAL;
			}
			ctx->state = MTK_STATE_INIT;
		}
	}

	return 0;
}
Example #15
0
static int gsc_m2m_dqbuf(struct file *file, void *fh,
			   struct v4l2_buffer *buf)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);
	return v4l2_m2m_dqbuf(file, ctx->m2m_ctx, buf);
}
Example #16
0
static int gsc_m2m_g_crop(struct file *file, void *fh, struct v4l2_crop *cr)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);

	return gsc_g_crop(ctx, cr);
}
Example #17
0
static int gsc_m2m_streamoff(struct file *file, void *fh,
			    enum v4l2_buf_type type)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);
	return v4l2_m2m_streamoff(file, ctx->m2m_ctx, type);
}
Example #18
0
static int gsc_m2m_mmap(struct file *file, struct vm_area_struct *vma)
{
	struct gsc_ctx *ctx = fh_to_ctx(file->private_data);

	return v4l2_m2m_mmap(file, ctx->m2m_ctx, vma);
}
Example #19
0
/* Request buffers */
static int vidioc_reqbufs(struct file *file, void *priv,
					  struct v4l2_requestbuffers *reqbufs)
{
	struct s5p_mfc_dev *dev = video_drvdata(file);
	struct s5p_mfc_ctx *ctx = fh_to_ctx(priv);
	int ret = 0;

	if (reqbufs->memory != V4L2_MEMORY_MMAP) {
		mfc_err("Only V4L2_MEMORY_MAP is supported\n");
		return -EINVAL;
	}
	if (reqbufs->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) {
		/* Can only request buffers after an instance has been opened. */
		if (ctx->state == MFCINST_INIT) {
			ctx->src_bufs_cnt = 0;
			if (reqbufs->count == 0) {
				mfc_debug(2, "Freeing buffers\n");
				s5p_mfc_clock_on();
				ret = vb2_reqbufs(&ctx->vq_src, reqbufs);
				s5p_mfc_clock_off();
				return ret;
			}
			/* Decoding */
			if (ctx->output_state != QUEUE_FREE) {
				mfc_err("Bufs have already been requested\n");
				return -EINVAL;
			}
			s5p_mfc_clock_on();
			ret = vb2_reqbufs(&ctx->vq_src, reqbufs);
			s5p_mfc_clock_off();
			if (ret) {
				mfc_err("vb2_reqbufs on output failed\n");
				return ret;
			}
			mfc_debug(2, "vb2_reqbufs: %d\n", ret);
			ctx->output_state = QUEUE_BUFS_REQUESTED;
		}
	} else if (reqbufs->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
		ctx->dst_bufs_cnt = 0;
		if (reqbufs->count == 0) {
			mfc_debug(2, "Freeing buffers\n");
			s5p_mfc_clock_on();
			ret = vb2_reqbufs(&ctx->vq_dst, reqbufs);
			s5p_mfc_clock_off();
			return ret;
		}
		if (ctx->capture_state != QUEUE_FREE) {
			mfc_err("Bufs have already been requested\n");
			return -EINVAL;
		}
		ctx->capture_state = QUEUE_BUFS_REQUESTED;
		s5p_mfc_clock_on();
		ret = vb2_reqbufs(&ctx->vq_dst, reqbufs);
		s5p_mfc_clock_off();
		if (ret) {
			mfc_err("vb2_reqbufs on capture failed\n");
			return ret;
		}
		if (reqbufs->count < ctx->dpb_count) {
			mfc_err("Not enough buffers allocated\n");
			reqbufs->count = 0;
			s5p_mfc_clock_on();
			ret = vb2_reqbufs(&ctx->vq_dst, reqbufs);
			s5p_mfc_clock_off();
			return -ENOMEM;
		}
		ctx->total_dpb_count = reqbufs->count;
		ret = s5p_mfc_hw_call(dev->mfc_ops, alloc_codec_buffers, ctx);
		if (ret) {
			mfc_err("Failed to allocate decoding buffers\n");
			reqbufs->count = 0;
			s5p_mfc_clock_on();
			ret = vb2_reqbufs(&ctx->vq_dst, reqbufs);
			s5p_mfc_clock_off();
			return -ENOMEM;
		}
		if (ctx->dst_bufs_cnt == ctx->total_dpb_count) {
			ctx->capture_state = QUEUE_BUFS_MMAPED;
		} else {
			mfc_err("Not all buffers passed to buf_init\n");
			reqbufs->count = 0;
			s5p_mfc_clock_on();
			ret = vb2_reqbufs(&ctx->vq_dst, reqbufs);
			s5p_mfc_hw_call(dev->mfc_ops, release_codec_buffers,
					ctx);
			s5p_mfc_clock_off();
			return -ENOMEM;
		}
		if (s5p_mfc_ctx_ready(ctx))
			set_work_bit_irqsave(ctx);
		s5p_mfc_hw_call(dev->mfc_ops, try_run, dev);
		s5p_mfc_wait_for_done_ctx(ctx,
					S5P_MFC_R2H_CMD_INIT_BUFFERS_RET, 0);
	}
	return ret;
}
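A minimal userspace sketch of the VIDIOC_REQBUFS call on the capture queue that drives the allocation path above; note the handler only accepts V4L2_MEMORY_MMAP. The fd and requested count are assumptions.

/* Userspace sketch: request MMAP capture buffers, then read back the count. */
#include <string.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

static int request_capture_buffers(int fd, unsigned int count)
{
	struct v4l2_requestbuffers req;

	memset(&req, 0, sizeof(req));
	req.count = count;
	req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
	req.memory = V4L2_MEMORY_MMAP;	/* the only memory model the driver accepts */

	if (ioctl(fd, VIDIOC_REQBUFS, &req))
		return -1;

	/* The driver may grant a different number of buffers than requested. */
	return (int)req.count;
}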
Example #20
0
static int hva_try_fmt_stream(struct file *file, void *priv,
			      struct v4l2_format *f)
{
	struct hva_ctx *ctx = fh_to_ctx(file->private_data);
	struct device *dev = ctx_to_dev(ctx);
	struct v4l2_pix_format *pix = &f->fmt.pix;
	u32 streamformat = pix->pixelformat;
	const struct hva_enc *enc;
	u32 width, height;
	u32 stream_size;

	enc = hva_find_encoder(ctx, ctx->frameinfo.pixelformat, streamformat);
	if (!enc) {
		dev_dbg(dev,
			"%s V4L2 TRY_FMT (CAPTURE): unsupported format %.4s\n",
			ctx->name, (char *)&pix->pixelformat);
		return -EINVAL;
	}

	width = pix->width;
	height = pix->height;
	if (ctx->flags & HVA_FLAG_FRAMEINFO) {
		/*
		 * if the frame resolution is already fixed, only allow the
		 * same stream resolution
		 */
		pix->width = ctx->frameinfo.width;
		pix->height = ctx->frameinfo.height;
		if ((pix->width != width) || (pix->height != height))
			dev_dbg(dev,
				"%s V4L2 TRY_FMT (CAPTURE): resolution updated %dx%d -> %dx%d to fit frame resolution\n",
				ctx->name, width, height,
				pix->width, pix->height);
	} else {
		/* adjust width & height */
		v4l_bound_align_image(&pix->width,
				      HVA_MIN_WIDTH, enc->max_width,
				      0,
				      &pix->height,
				      HVA_MIN_HEIGHT, enc->max_height,
				      0,
				      0);

		if ((pix->width != width) || (pix->height != height))
			dev_dbg(dev,
				"%s V4L2 TRY_FMT (CAPTURE): resolution updated %dx%d -> %dx%d to fit min/max/alignment\n",
				ctx->name, width, height,
				pix->width, pix->height);
	}

	stream_size = estimated_stream_size(pix->width, pix->height);
	if (pix->sizeimage < stream_size)
		pix->sizeimage = stream_size;

	pix->bytesperline = 0;
	pix->colorspace = ctx->colorspace;
	pix->xfer_func = ctx->xfer_func;
	pix->ycbcr_enc = ctx->ycbcr_enc;
	pix->quantization = ctx->quantization;
	pix->field = V4L2_FIELD_NONE;

	return 0;
}
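A minimal userspace sketch of the VIDIOC_TRY_FMT negotiation this handler implements for the encoded-stream (capture) side; the fd and the 1920x1080 request are assumptions.

/* Userspace sketch: probe an H.264 stream format without committing to it. */
#include <string.h>
#include <stdio.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

static int try_stream_format(int fd)
{
	struct v4l2_format fmt;

	memset(&fmt, 0, sizeof(fmt));
	fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_H264;
	fmt.fmt.pix.width = 1920;
	fmt.fmt.pix.height = 1080;

	if (ioctl(fd, VIDIOC_TRY_FMT, &fmt))
		return -1;

	/* The driver reports the adjusted size and the bitstream buffer it needs. */
	printf("%ux%u, sizeimage=%u\n", fmt.fmt.pix.width, fmt.fmt.pix.height,
	       fmt.fmt.pix.sizeimage);
	return 0;
}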