static int vidioc_venc_g_fmt(struct file *file, void *priv,
			     struct v4l2_format *f)
{
	struct v4l2_pix_format_mplane *pix = &f->fmt.pix_mp;
	struct mtk_vcodec_ctx *ctx = fh_to_ctx(priv);
	struct vb2_queue *vq;
	struct mtk_q_data *q_data;
	int i;

	vq = v4l2_m2m_get_vq(ctx->m2m_ctx, f->type);
	if (!vq)
		return -EINVAL;

	q_data = mtk_venc_get_q_data(ctx, f->type);

	pix->width = q_data->coded_width;
	pix->height = q_data->coded_height;
	pix->pixelformat = q_data->fmt->fourcc;
	pix->field = q_data->field;
	pix->num_planes = q_data->fmt->num_planes;
	for (i = 0; i < pix->num_planes; i++) {
		pix->plane_fmt[i].bytesperline = q_data->bytesperline[i];
		pix->plane_fmt[i].sizeimage = q_data->sizeimage[i];
		memset(&(pix->plane_fmt[i].reserved[0]), 0x0,
		       sizeof(pix->plane_fmt[i].reserved));
	}

	pix->flags = 0;
	pix->colorspace = ctx->colorspace;
	pix->ycbcr_enc = ctx->ycbcr_enc;
	pix->quantization = ctx->quantization;
	pix->xfer_func = ctx->xfer_func;

	return 0;
}
static int s5p_jpeg_g_fmt(struct file *file, void *priv, struct v4l2_format *f)
{
	struct vb2_queue *vq;
	struct s5p_jpeg_q_data *q_data = NULL;
	struct v4l2_pix_format *pix = &f->fmt.pix;
	struct s5p_jpeg_ctx *ct = fh_to_ctx(priv);

	vq = v4l2_m2m_get_vq(ct->m2m_ctx, f->type);
	if (!vq)
		return -EINVAL;

	if (f->type == V4L2_BUF_TYPE_VIDEO_CAPTURE &&
	    ct->mode == S5P_JPEG_DECODE && !ct->hdr_parsed)
		return -EINVAL;
	q_data = get_q_data(ct, f->type);
	BUG_ON(q_data == NULL);

	pix->width = q_data->w;
	pix->height = q_data->h;
	pix->field = V4L2_FIELD_NONE;
	pix->pixelformat = q_data->fmt->fourcc;
	pix->bytesperline = 0;
	if (q_data->fmt->fourcc != V4L2_PIX_FMT_JPEG) {
		u32 bpl = q_data->w;

		if (q_data->fmt->colplanes == 1)
			bpl = (bpl * q_data->fmt->depth) >> 3;
		pix->bytesperline = bpl;
	}
	pix->sizeimage = q_data->size;

	return 0;
}
static int hva_s_fmt_stream(struct file *file, void *fh, struct v4l2_format *f)
{
	struct hva_ctx *ctx = fh_to_ctx(file->private_data);
	struct device *dev = ctx_to_dev(ctx);
	struct vb2_queue *vq;
	int ret;

	ret = hva_try_fmt_stream(file, fh, f);
	if (ret) {
		dev_dbg(dev, "%s V4L2 S_FMT (CAPTURE): unsupported format %.4s\n",
			ctx->name, (char *)&f->fmt.pix.pixelformat);
		return ret;
	}

	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type);
	if (vb2_is_streaming(vq)) {
		dev_dbg(dev, "%s V4L2 S_FMT (CAPTURE): queue busy\n", ctx->name);
		return -EBUSY;
	}

	ctx->max_stream_size = f->fmt.pix.sizeimage;
	ctx->streaminfo.width = f->fmt.pix.width;
	ctx->streaminfo.height = f->fmt.pix.height;
	ctx->streaminfo.streamformat = f->fmt.pix.pixelformat;
	ctx->flags |= HVA_FLAG_STREAMINFO;

	return 0;
}
static int hva_qbuf(struct file *file, void *priv, struct v4l2_buffer *buf)
{
	struct hva_ctx *ctx = fh_to_ctx(file->private_data);
	struct device *dev = ctx_to_dev(ctx);

	if (buf->type == V4L2_BUF_TYPE_VIDEO_CAPTURE) {
		/*
		 * Depending on the targeted compressed video format, the
		 * capture buffer might contain headers (e.g. H.264 SPS/PPS)
		 * filled in by the driver client; the size of this data is
		 * copied from the bytesused field of the V4L2 buffer into the
		 * bytesused (payload) field of the hva stream buffer.
		 */
		struct vb2_queue *vq;
		struct hva_stream *stream;

		vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, buf->type);

		if (buf->index >= vq->num_buffers) {
			dev_dbg(dev, "%s buffer index %d out of range (%d)\n",
				ctx->name, buf->index, vq->num_buffers);
			return -EINVAL;
		}

		stream = (struct hva_stream *)vq->bufs[buf->index];
		stream->bytesused = buf->bytesused;
	}

	return v4l2_m2m_qbuf(file, ctx->fh.m2m_ctx, buf);
}
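/*
 * Illustrative user-space counterpart of the convention handled in hva_qbuf()
 * above: a minimal sketch, assuming an MMAP capture queue whose buffer 0 has
 * already been mapped at capture_plane.  queue_stream_headers() and its
 * parameters are hypothetical names, not part of the driver.
 */
#include <string.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

static int queue_stream_headers(int video_fd, void *capture_plane,
				const void *headers, size_t headers_len)
{
	struct v4l2_buffer buf = {
		.type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
		.memory = V4L2_MEMORY_MMAP,
		.index = 0,
	};

	/* client-prepared headers (e.g. H.264 SPS/PPS) go at the start */
	memcpy(capture_plane, headers, headers_len);
	/* their size is passed through bytesused, which hva_qbuf() reads */
	buf.bytesused = headers_len;

	return ioctl(video_fd, VIDIOC_QBUF, &buf);
}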
static int nxp_video_streamon(struct file *file, void *fh,
			      enum v4l2_buf_type i)
{
	int ret;
	u32 pad;
	struct nxp_video *me = file->private_data;
	struct v4l2_subdev *subdev = _get_remote_subdev(me, i, &pad);
	void *hostdata_back;

	vmsg("%s: me %p, %s\n", __func__, me, me->name);

	if (me->vbq) {
		ret = vb2_streamon(me->vbq, i);
		if (ret < 0) {
			pr_err("%s: failed to vb2_streamon()\n", __func__);
			return ret;
		}
	} else {
		struct vb2_queue *vq = v4l2_m2m_get_vq(me->m2m_ctx, i);

		ret = vb2_streamon(vq, i);
		if (ret < 0) {
			pr_err("%s: m2m, failed to vb2_streamon()\n", __func__);
			return ret;
		}
	}

	/* for mlc subdev */
	hostdata_back = v4l2_get_subdev_hostdata(subdev);
	v4l2_set_subdev_hostdata(subdev, me->name);
	ret = v4l2_subdev_call(subdev, video, s_stream, 1);
	v4l2_set_subdev_hostdata(subdev, hostdata_back);

	return ret;
}
/**
 * v4l2_m2m_reqbufs() - multi-queue-aware REQBUFS multiplexer
 */
int v4l2_m2m_reqbufs(struct file *file, struct v4l2_m2m_ctx *m2m_ctx,
		     struct v4l2_requestbuffers *reqbufs)
{
	struct vb2_queue *vq;

	vq = v4l2_m2m_get_vq(m2m_ctx, reqbufs->type);
	return vb2_reqbufs(vq, reqbufs);
}
/**
 * v4l2_m2m_dqbuf() - dequeue a source or destination buffer, depending on
 * the type
 */
int v4l2_m2m_dqbuf(struct file *file, struct v4l2_m2m_ctx *m2m_ctx,
		   struct v4l2_buffer *buf)
{
	struct vb2_queue *vq;

	vq = v4l2_m2m_get_vq(m2m_ctx, buf->type);
	return vb2_dqbuf(vq, buf, file->f_flags & O_NONBLOCK);
}
/**
 * v4l2_m2m_expbuf() - export a source or destination buffer, depending on
 * the type
 */
int v4l2_m2m_expbuf(struct file *file, struct v4l2_m2m_ctx *m2m_ctx,
		    struct v4l2_exportbuffer *eb)
{
	struct vb2_queue *vq;

	vq = v4l2_m2m_get_vq(m2m_ctx, eb->type);
	return vb2_expbuf(vq, eb);
}
/**
 * v4l2_m2m_create_bufs() - create a source or destination buffer, depending
 * on the type
 */
int v4l2_m2m_create_bufs(struct file *file, struct v4l2_m2m_ctx *m2m_ctx,
			 struct v4l2_create_buffers *create)
{
	struct vb2_queue *vq;

	vq = v4l2_m2m_get_vq(m2m_ctx, create->format.type);
	return vb2_create_bufs(vq, create);
}
/**
 * v4l2_m2m_streamoff() - turn off streaming for a video queue
 */
int v4l2_m2m_streamoff(struct file *file, struct v4l2_m2m_ctx *m2m_ctx,
		       enum v4l2_buf_type type)
{
	struct vb2_queue *vq;

	vq = v4l2_m2m_get_vq(m2m_ctx, type);
	return vb2_streamoff(vq, type);
}
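/*
 * Minimal sketch (not taken from any of the drivers above) of how a mem2mem
 * driver typically forwards its buffer ioctls straight to the helpers above;
 * several drivers in this collection follow this pattern.  The example_ctx
 * type and the fh_to_ctx()/m2m_ctx names are assumptions for illustration.
 */
static int example_vidioc_reqbufs(struct file *file, void *priv,
				  struct v4l2_requestbuffers *reqbufs)
{
	struct example_ctx *ctx = fh_to_ctx(priv);

	return v4l2_m2m_reqbufs(file, ctx->m2m_ctx, reqbufs);
}

static int example_vidioc_dqbuf(struct file *file, void *priv,
				struct v4l2_buffer *buf)
{
	struct example_ctx *ctx = fh_to_ctx(priv);

	return v4l2_m2m_dqbuf(file, ctx->m2m_ctx, buf);
}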
static int jpeg_enc_vidioc_s_fmt_out(struct file *file, void *priv,
				     struct v4l2_format *f)
{
	struct jpeg_ctx *ctx = priv;
	struct vb2_queue *vq;
	struct v4l2_pix_format_mplane *pix;
	struct jpeg_fmt *fmt;
	struct jpeg_frame *frame;
	int ret;
	int i;

	ret = jpeg_enc_vidioc_try_fmt(file, priv, f);
	if (ret)
		return ret;

	vq = v4l2_m2m_get_vq(ctx->m2m_ctx, f->type);
	if (!vq)
		return -EINVAL;

	if (vb2_is_busy(vq)) {
		v4l2_err(&ctx->jpeg_dev->v4l2_dev, "queue (%d) busy\n", f->type);
		return -EBUSY;
	}

	/* TODO: width & height have to be a multiple of two */
	pix = &f->fmt.pix_mp;
	fmt = find_format(f);

	frame = ctx_get_frame(ctx, f->type);
	if (IS_ERR(frame))
		return PTR_ERR(frame);

	frame->jpeg_fmt = fmt;
	if (!frame->jpeg_fmt) {
		v4l2_err(&ctx->jpeg_dev->v4l2_dev,
			 "not supported format values\n");
		return -EINVAL;
	}

	for (i = 0; i < fmt->memplanes; i++) {
		ctx->payload[i] = pix->plane_fmt[i].bytesperline * pix->height;
		ctx->param.enc_param.in_depth[i] = fmt->depth[i];
	}

	frame->width = pix->width;
	frame->height = pix->height;
	frame->pixelformat = pix->pixelformat;

	ctx->param.enc_param.in_width = pix->width;
	ctx->param.enc_param.in_height = pix->height;
	ctx->param.enc_param.in_plane = fmt->memplanes;
	ctx->param.enc_param.in_fmt = fmt->color;

	return 0;
}
static int jpeg_dec_m2m_reqbufs(struct file *file, void *priv,
				struct v4l2_requestbuffers *reqbufs)
{
	struct jpeg_ctx *ctx = priv;
	struct vb2_queue *vq;

	vq = v4l2_m2m_get_vq(ctx->m2m_ctx, reqbufs->type);

	return v4l2_m2m_reqbufs(file, ctx->m2m_ctx, reqbufs);
}
static int nxp_video_querybuf(struct file *file, void *fh,
			      struct v4l2_buffer *b)
{
	struct nxp_video *me = file->private_data;

	if (me->vbq) {
		return vb2_querybuf(me->vbq, b);
	} else {
		struct vb2_queue *vq = v4l2_m2m_get_vq(me->m2m_ctx, b->type);

		return vb2_querybuf(vq, b);
	}
}
static int fimc_m2m_s_fmt_mplane(struct file *file, void *fh,
				 struct v4l2_format *f)
{
	struct fimc_ctx *ctx = fh_to_ctx(fh);
	struct fimc_dev *fimc = ctx->fimc_dev;
	struct vb2_queue *vq;
	struct fimc_frame *frame;
	struct v4l2_pix_format_mplane *pix;
	int i, ret = 0;

	ret = fimc_try_fmt_mplane(ctx, f);
	if (ret)
		return ret;

	vq = v4l2_m2m_get_vq(ctx->m2m_ctx, f->type);

	if (vb2_is_busy(vq)) {
		v4l2_err(fimc->m2m.vfd, "queue (%d) busy\n", f->type);
		return -EBUSY;
	}

	if (f->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)
		frame = &ctx->s_frame;
	else
		frame = &ctx->d_frame;

	pix = &f->fmt.pix_mp;
	frame->fmt = fimc_find_format(&pix->pixelformat, NULL,
				      get_m2m_fmt_flags(f->type), 0);
	if (!frame->fmt)
		return -EINVAL;

	/* Update RGB Alpha control state and value range */
	fimc_alpha_ctrl_update(ctx);

	for (i = 0; i < frame->fmt->colplanes; i++) {
		frame->payload[i] =
			(pix->width * pix->height * frame->fmt->depth[i]) / 8;
	}

	fimc_fill_frame(frame, f);

	ctx->scaler.enabled = 1;

	if (f->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
		fimc_ctx_state_set(FIMC_PARAMS | FIMC_DST_FMT, ctx);
	else
		fimc_ctx_state_set(FIMC_PARAMS | FIMC_SRC_FMT, ctx);

	dbg("f_w: %d, f_h: %d", frame->f_width, frame->f_height);

	return 0;
}
static int nxp_video_dqbuf(struct file *file, void *fh, struct v4l2_buffer *b)
{
	struct nxp_video *me = file->private_data;

	pr_debug("%s\n", __func__);

	if (me->vbq) {
		return vb2_dqbuf(me->vbq, b, file->f_flags & O_NONBLOCK);
	} else {
		struct vb2_queue *vq = v4l2_m2m_get_vq(me->m2m_ctx, b->type);

		return vb2_dqbuf(vq, b, file->f_flags & O_NONBLOCK);
	}
}
static int nxp_video_qbuf(struct file *file, void *fh, struct v4l2_buffer *b)
{
	struct nxp_video *me = file->private_data;

	pr_debug("%s: %s, index %d\n", __func__, me->name, b->index);

	if (me->vbq) {
		return vb2_qbuf(me->vbq, b);
	} else {
		struct vb2_queue *vq = v4l2_m2m_get_vq(me->m2m_ctx, b->type);

		return vb2_qbuf(vq, b);
	}
}
static int nxp_video_reqbufs(struct file *file, void *fh,
			     struct v4l2_requestbuffers *b)
{
	struct nxp_video *me = file->private_data;

	pr_debug("%s: %s\n", __func__, me->name);

	if (me->vbq) {
		/* capture, out */
		return vb2_reqbufs(me->vbq, b); /* call to queue_setup */
	} else {
		/* m2m */
		struct vb2_queue *vq = v4l2_m2m_get_vq(me->m2m_ctx, b->type);

		return vb2_reqbufs(vq, b);
	}
}
static int vidioc_decoder_cmd(struct file *file, void *priv,
			      struct v4l2_decoder_cmd *cmd)
{
	struct mtk_vcodec_ctx *ctx = fh_to_ctx(priv);
	struct vb2_queue *src_vq, *dst_vq;
	int ret;

	ret = vidioc_try_decoder_cmd(file, priv, cmd);
	if (ret)
		return ret;

	mtk_v4l2_debug(1, "decoder cmd=%u", cmd->cmd);
	dst_vq = v4l2_m2m_get_vq(ctx->m2m_ctx,
				 V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE);
	switch (cmd->cmd) {
	case V4L2_DEC_CMD_STOP:
		src_vq = v4l2_m2m_get_vq(ctx->m2m_ctx,
					 V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE);
		if (!vb2_is_streaming(src_vq)) {
			mtk_v4l2_debug(1, "Output stream is off. No need to flush.");
			return 0;
		}
		if (!vb2_is_streaming(dst_vq)) {
			mtk_v4l2_debug(1, "Capture stream is off. No need to flush.");
			return 0;
		}
		v4l2_m2m_buf_queue(ctx->m2m_ctx, &ctx->empty_flush_buf->vb);
		v4l2_m2m_try_schedule(ctx->m2m_ctx);
		break;

	case V4L2_DEC_CMD_START:
		vb2_clear_last_buffer_dequeued(dst_vq);
		break;

	default:
		return -EINVAL;
	}

	return 0;
}
/**
 * v4l2_m2m_streamon() - turn on streaming for a video queue
 */
int v4l2_m2m_streamon(struct file *file, struct v4l2_m2m_ctx *m2m_ctx,
		      enum v4l2_buf_type type)
{
	struct vb2_queue *vq;
	int ret;

	vq = v4l2_m2m_get_vq(m2m_ctx, type);
	ret = vb2_streamon(vq, type);
	if (!ret)
		v4l2_m2m_try_schedule(m2m_ctx);

	return ret;
}
/**
 * v4l2_m2m_qbuf() - enqueue a source or destination buffer, depending on
 * the type
 */
int v4l2_m2m_qbuf(struct file *file, struct v4l2_m2m_ctx *m2m_ctx,
		  struct v4l2_buffer *buf)
{
	struct vb2_queue *vq;
	int ret;

	vq = v4l2_m2m_get_vq(m2m_ctx, buf->type);
	ret = vb2_qbuf(vq, buf);
	if (!ret)
		v4l2_m2m_try_schedule(m2m_ctx);

	return ret;
}
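/*
 * Sketch of what the v4l2_m2m_try_schedule() calls above lead to (an
 * illustration with assumed names such as example_ctx and ctx->m2m_dev, not
 * code from this file): once both queues are streaming and each holds a
 * ready buffer, the mem2mem core invokes the driver's device_run callback,
 * which for a simple synchronous device could look like this.
 */
static void example_device_run(void *priv)
{
	struct example_ctx *ctx = priv;
	struct vb2_v4l2_buffer *src, *dst;

	src = v4l2_m2m_src_buf_remove(ctx->m2m_ctx);
	dst = v4l2_m2m_dst_buf_remove(ctx->m2m_ctx);

	/* ... process src into dst here ... */

	v4l2_m2m_buf_done(src, VB2_BUF_STATE_DONE);
	v4l2_m2m_buf_done(dst, VB2_BUF_STATE_DONE);
	v4l2_m2m_job_finish(ctx->m2m_dev, ctx->m2m_ctx);
}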
static int rot_v4l2_s_fmt_mplane(struct file *file, void *priv,
				 struct v4l2_format *f)
{
	struct rot_ctx *ctx = priv;
	struct vb2_queue *vq;
	struct rot_frame *frame;
	struct v4l2_pix_format_mplane *pixm = &f->fmt.pix_mp;
	int i, ret = 0;

	vq = v4l2_m2m_get_vq(ctx->m2m_ctx, f->type);
	if (vb2_is_streaming(vq)) {
		rot_err("device is busy\n");
		return -EBUSY;
	}

	ret = rot_v4l2_try_fmt_mplane(file, priv, f);
	if (ret < 0)
		return ret;

	frame = ctx_get_frame(ctx, f->type);
	if (IS_ERR(frame))
		return PTR_ERR(frame);

	set_bit(CTX_PARAMS, &ctx->flags);

	frame->rot_fmt = rot_find_format(f);
	if (!frame->rot_fmt) {
		rot_err("not supported format values\n");
		return -EINVAL;
	}

	rot_adjust_pixminfo(ctx, frame, pixm);

	frame->pix_mp.pixelformat = pixm->pixelformat;
	frame->pix_mp.width = pixm->width;
	frame->pix_mp.height = pixm->height;

	/*
	 * s_crop or g_crop should not be called before g_fmt or s_fmt.
	 * Let's assume that we can keep this order.
	 */
	frame->crop.width = pixm->width;
	frame->crop.height = pixm->height;

	for (i = 0; i < frame->rot_fmt->num_planes; ++i)
		frame->bytesused[i] = (pixm->width * pixm->height *
				       frame->rot_fmt->bitperpixel[i]) >> 3;

	return 0;
}
static int bdisp_s_fmt(struct file *file, void *fh, struct v4l2_format *f)
{
	struct bdisp_ctx *ctx = fh_to_ctx(fh);
	struct vb2_queue *vq;
	struct bdisp_frame *frame;
	struct v4l2_pix_format *pix;
	int ret;
	u32 state;

	ret = bdisp_try_fmt(file, fh, f);
	if (ret) {
		dev_err(ctx->bdisp_dev->dev, "Cannot set format\n");
		return ret;
	}

	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type);
	if (vb2_is_streaming(vq)) {
		dev_err(ctx->bdisp_dev->dev, "queue (%d) busy\n", f->type);
		return -EBUSY;
	}

	frame = (f->type == V4L2_BUF_TYPE_VIDEO_OUTPUT) ?
		&ctx->src : &ctx->dst;
	pix = &f->fmt.pix;

	frame->fmt = bdisp_find_fmt(pix->pixelformat);
	if (!frame->fmt) {
		dev_err(ctx->bdisp_dev->dev, "Unknown format 0x%x\n",
			pix->pixelformat);
		return -EINVAL;
	}

	frame->width = pix->width;
	frame->height = pix->height;
	frame->bytesperline = pix->bytesperline;
	frame->sizeimage = pix->sizeimage;
	frame->field = pix->field;
	if (f->type == V4L2_BUF_TYPE_VIDEO_OUTPUT)
		frame->colorspace = pix->colorspace;

	frame->crop.width = frame->width;
	frame->crop.height = frame->height;
	frame->crop.left = 0;
	frame->crop.top = 0;

	state = BDISP_PARAMS;
	state |= (f->type == V4L2_BUF_TYPE_VIDEO_CAPTURE) ?
		 BDISP_DST_FMT : BDISP_SRC_FMT;
	bdisp_ctx_state_lock_set(state, ctx);

	return 0;
}
static int jpeg_dec_m2m_reqbufs(struct file *file, void *priv,
				struct v4l2_requestbuffers *reqbufs)
{
	struct jpeg_ctx *ctx = priv;
	struct vb2_queue *vq;

	vq = v4l2_m2m_get_vq(ctx->m2m_ctx, reqbufs->type);

	if (vq->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)
		ctx->dev->vb2->set_cacheable(ctx->dev->alloc_ctx,
					     ctx->input_cacheable);
	else if (vq->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
		ctx->dev->vb2->set_cacheable(ctx->dev->alloc_ctx,
					     ctx->output_cacheable);

	return v4l2_m2m_reqbufs(file, ctx->m2m_ctx, reqbufs);
}
int v4l2_m2m_reqbufs(struct file *file, struct v4l2_m2m_ctx *m2m_ctx,
		     struct v4l2_requestbuffers *reqbufs)
{
	struct vb2_queue *vq;
	int ret;

	vq = v4l2_m2m_get_vq(m2m_ctx, reqbufs->type);
	ret = vb2_reqbufs(vq, reqbufs);
	/*
	 * If count == 0, the caller has released all buffers and is no
	 * longer the owner of the queue.  Otherwise we have an owner.
	 */
	if (ret == 0)
		vq->owner = reqbufs->count ? file->private_data : NULL;

	return ret;
}
static int gsc_m2m_s_fmt_mplane(struct file *file, void *fh,
				struct v4l2_format *f)
{
	struct gsc_ctx *ctx = fh_to_ctx(fh);
	struct vb2_queue *vq;
	struct gsc_frame *frame;
	struct v4l2_pix_format_mplane *pix;
	int i, ret = 0;

	ret = gsc_m2m_try_fmt_mplane(file, fh, f);
	if (ret)
		return ret;

	vq = v4l2_m2m_get_vq(ctx->m2m_ctx, f->type);

	if (vb2_is_streaming(vq)) {
		pr_err("queue (%d) busy", f->type);
		return -EBUSY;
	}

	if (V4L2_TYPE_IS_OUTPUT(f->type))
		frame = &ctx->s_frame;
	else
		frame = &ctx->d_frame;

	pix = &f->fmt.pix_mp;
	frame->fmt = find_fmt(&pix->pixelformat, NULL, 0);
	frame->colorspace = pix->colorspace;
	if (!frame->fmt)
		return -EINVAL;

	for (i = 0; i < frame->fmt->num_planes; i++)
		frame->payload[i] = pix->plane_fmt[i].sizeimage;

	gsc_set_frame_size(frame, pix->width, pix->height);

	if (f->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
		gsc_ctx_state_lock_set(GSC_PARAMS | GSC_DST_FMT, ctx);
	else
		gsc_ctx_state_lock_set(GSC_PARAMS | GSC_SRC_FMT, ctx);

	pr_debug("f_w: %d, f_h: %d", frame->f_width, frame->f_height);

	return 0;
}
static int jpeg_dec_vidioc_s_fmt_out(struct file *file, void *priv,
				     struct v4l2_format *f)
{
	struct jpeg_ctx *ctx = priv;
	struct vb2_queue *vq;
	struct v4l2_pix_format_mplane *pix;
	struct jpeg_fmt *fmt;
	int ret;
	int i;

	ret = jpeg_dec_vidioc_try_fmt(file, priv, f);
	if (ret)
		return ret;

	vq = v4l2_m2m_get_vq(ctx->m2m_ctx, f->type);
	if (!vq)
		return -EINVAL;

	if (vb2_is_busy(vq)) {
		v4l2_err(&ctx->dev->v4l2_dev, "queue (%d) busy\n", f->type);
		return -EBUSY;
	}

	/* TODO: width & height have to be a multiple of two */
	pix = &f->fmt.pix_mp;
	fmt = find_format(f);

	for (i = 0; i < fmt->memplanes; i++)
		ctx->payload[i] = pix->plane_fmt[i].bytesperline * pix->height;

	ctx->param.dec_param.in_width = pix->width;
	ctx->param.dec_param.in_height = pix->height;
	ctx->param.dec_param.in_plane = fmt->memplanes;
	ctx->param.dec_param.in_depth = fmt->depth[0];
	ctx->param.dec_param.in_fmt = fmt->color;

	/* plane size expressed in 32-byte units, rounded up */
	if ((pix->plane_fmt[0].sizeimage % 32) == 0)
		ctx->param.dec_param.size = pix->plane_fmt[0].sizeimage / 32;
	else
		ctx->param.dec_param.size = (pix->plane_fmt[0].sizeimage / 32) + 1;

	ctx->param.dec_param.mem_size = pix->plane_fmt[0].sizeimage;

	return 0;
}
static int vidioc_g_fmt(struct file *file, void *prv, struct v4l2_format *f)
{
	struct g2d_ctx *ctx = prv;
	struct vb2_queue *vq;
	struct g2d_frame *frm;

	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type);
	if (!vq)
		return -EINVAL;

	frm = get_frame(ctx, f->type);
	if (IS_ERR(frm))
		return PTR_ERR(frm);

	f->fmt.pix.width = frm->width;
	f->fmt.pix.height = frm->height;
	f->fmt.pix.field = V4L2_FIELD_NONE;
	f->fmt.pix.pixelformat = frm->fmt->fourcc;
	f->fmt.pix.bytesperline = (frm->width * frm->fmt->depth) >> 3;
	f->fmt.pix.sizeimage = frm->size;

	return 0;
}
static int jpeg_enc_vidioc_s_fmt_cap(struct file *file, void *priv,
				     struct v4l2_format *f)
{
	struct jpeg_ctx *ctx = priv;
	struct vb2_queue *vq;
	struct v4l2_pix_format_mplane *pix;
	struct jpeg_fmt *fmt;
	int ret;
	int i;

	ret = jpeg_enc_vidioc_try_fmt(file, priv, f);
	if (ret)
		return ret;

	vq = v4l2_m2m_get_vq(ctx->m2m_ctx, f->type);
	if (!vq)
		return -EINVAL;

	if (vb2_is_busy(vq)) {
		v4l2_err(&ctx->dev->v4l2_dev, "queue (%d) busy\n", f->type);
		return -EBUSY;
	}

	pix = &f->fmt.pix_mp;
	fmt = find_format(f);
	if (!fmt)
		return -EINVAL;

	for (i = 0; i < fmt->memplanes; i++)
		ctx->payload[i] = pix->plane_fmt[i].bytesperline * pix->height;

	ctx->param.enc_param.out_width = pix->height;
	ctx->param.enc_param.out_height = pix->width;
	ctx->param.enc_param.out_plane = fmt->memplanes;
	ctx->param.enc_param.out_depth = fmt->depth[0];
	ctx->param.enc_param.out_fmt = fmt->color;

	return 0;
}
static int nxp_video_streamoff(struct file *file, void *fh,
			       enum v4l2_buf_type i)
{
	int ret;
	u32 pad;
	struct nxp_video *me;
	struct v4l2_subdev *subdev;
	void *hostdata_back;

	me = file->private_data;
	subdev = _get_remote_subdev(me, i, &pad);

	if (me->vbq) {
		ret = vb2_streamoff(me->vbq, i);
		if (ret < 0) {
			printk(KERN_ERR "%s: failed to vb2_streamoff() %s\n",
			       __func__, me->name);
			return 0;
		}
	} else {
		struct vb2_queue *vq = v4l2_m2m_get_vq(me->m2m_ctx, i);

		ret = vb2_streamoff(vq, i);
		if (ret < 0) {
			pr_err("%s: m2m, failed to vb2_streamoff() %s\n",
			       __func__, me->name);
			return 0;
		}
	}

	vmsg("%s %s\n", __func__, me->name);

	hostdata_back = v4l2_get_subdev_hostdata(subdev);
	v4l2_set_subdev_hostdata(subdev, me->name);
	ret = v4l2_subdev_call(subdev, video, s_stream, 0);
	v4l2_set_subdev_hostdata(subdev, hostdata_back);

	vmsg("%s: %s exit\n", __func__, me->name);

	return ret;
}
static int vidioc_s_fmt(struct file *file, void *prv, struct v4l2_format *f)
{
	struct g2d_ctx *ctx = prv;
	struct g2d_dev *dev = ctx->dev;
	struct vb2_queue *vq;
	struct g2d_frame *frm;
	struct g2d_fmt *fmt;
	int ret = 0;

	/*
	 * Adjust all values according to the hardware capabilities
	 * and the chosen format.
	 */
	ret = vidioc_try_fmt(file, prv, f);
	if (ret)
		return ret;

	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type);
	if (vb2_is_busy(vq)) {
		v4l2_err(&dev->v4l2_dev, "queue (%d) busy\n", f->type);
		return -EBUSY;
	}

	frm = get_frame(ctx, f->type);
	if (IS_ERR(frm))
		return PTR_ERR(frm);

	fmt = find_fmt(f);
	if (!fmt)
		return -EINVAL;

	frm->width = f->fmt.pix.width;
	frm->height = f->fmt.pix.height;
	frm->size = f->fmt.pix.sizeimage;
	/* Reset crop settings */
	frm->o_width = 0;
	frm->o_height = 0;
	frm->c_width = frm->width;
	frm->c_height = frm->height;
	frm->right = frm->width;
	frm->bottom = frm->height;
	frm->fmt = fmt;
	frm->stride = f->fmt.pix.bytesperline;

	return 0;
}