/*
 * hva_release - V4L2 file release handler: tear down one encoder instance.
 *
 * Closes the HW encoder (if one was opened), frees its slot in the device
 * instances array, then releases the m2m context, control handler and file
 * handle before freeing the context itself.
 *
 * NOTE(review): a second, fuller definition of hva_release appears later in
 * this file (with hva_dbg_summary() and debugfs teardown); two definitions
 * of the same static function cannot coexist in one translation unit —
 * confirm which one is intended and drop the other.
 */
static int hva_release(struct file *file)
{
	struct hva_ctx *ctx = fh_to_ctx(file->private_data);
	struct hva_dev *hva = ctx_to_hdev(ctx);
	struct device *dev = ctx_to_dev(ctx);
	const struct hva_enc *enc = ctx->enc;

	if (enc) {
		dev_dbg(dev, "%s %s encoder closed\n", ctx->name, enc->name);
		enc->close(ctx);
		ctx->enc = NULL;

		/* clear instance context in instances array */
		hva->instances[ctx->id] = NULL;
		hva->nb_of_instances--;
	}

	/* release V4L2 resources before freeing the context memory */
	v4l2_m2m_ctx_release(ctx->fh.m2m_ctx);
	v4l2_ctrl_handler_free(&ctx->ctrl_handler);
	v4l2_fh_del(&ctx->fh);
	v4l2_fh_exit(&ctx->fh);

	dev_info(dev, "%s encoder instance released\n", ctx->name);

	kfree(ctx);

	return 0;
}
static int hva_s_fmt_stream(struct file *file, void *fh, struct v4l2_format *f) { struct hva_ctx *ctx = fh_to_ctx(file->private_data); struct device *dev = ctx_to_dev(ctx); struct vb2_queue *vq; int ret; ret = hva_try_fmt_stream(file, fh, f); if (ret) { dev_dbg(dev, "%s V4L2 S_FMT (CAPTURE): unsupported format %.4s\n", ctx->name, (char *)&f->fmt.pix.pixelformat); return ret; } vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type); if (vb2_is_streaming(vq)) { dev_dbg(dev, "%s V4L2 S_FMT (CAPTURE): queue busy\n", ctx->name); return -EBUSY; } ctx->max_stream_size = f->fmt.pix.sizeimage; ctx->streaminfo.width = f->fmt.pix.width; ctx->streaminfo.height = f->fmt.pix.height; ctx->streaminfo.streamformat = f->fmt.pix.pixelformat; ctx->flags |= HVA_FLAG_STREAMINFO; return 0; }
static int hva_qbuf(struct file *file, void *priv, struct v4l2_buffer *buf) { struct hva_ctx *ctx = fh_to_ctx(file->private_data); struct device *dev = ctx_to_dev(ctx); if (buf->type == V4L2_BUF_TYPE_VIDEO_CAPTURE) { /* * depending on the targeted compressed video format, the * capture buffer might contain headers (e.g. H.264 SPS/PPS) * filled in by the driver client; the size of these data is * copied from the bytesused field of the V4L2 buffer in the * payload field of the hva stream buffer */ struct vb2_queue *vq; struct hva_stream *stream; vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, buf->type); if (buf->index >= vq->num_buffers) { dev_dbg(dev, "%s buffer index %d out of range (%d)\n", ctx->name, buf->index, vq->num_buffers); return -EINVAL; } stream = (struct hva_stream *)vq->bufs[buf->index]; stream->bytesused = buf->bytesused; } return v4l2_m2m_qbuf(file, ctx->fh.m2m_ctx, buf); }
/*
 * hva_job_abort - m2m job abort callback.
 *
 * Raises the per-instance aborting flag; hva_job_ready() reports the
 * instance as not ready while this flag is set.
 */
static void hva_job_abort(void *priv)
{
	struct hva_ctx *ctx = priv;

	dev_dbg(ctx_to_dev(ctx), "%s aborting job\n", ctx->name);

	ctx->aborting = true;
}
static int hva_buf_prepare(struct vb2_buffer *vb) { struct hva_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue); struct device *dev = ctx_to_dev(ctx); struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb); if (vb->vb2_queue->type == V4L2_BUF_TYPE_VIDEO_OUTPUT) { struct hva_frame *frame = to_hva_frame(vbuf); if (vbuf->field == V4L2_FIELD_ANY) vbuf->field = V4L2_FIELD_NONE; if (vbuf->field != V4L2_FIELD_NONE) { dev_dbg(dev, "%s frame[%d] prepare: %d field not supported\n", ctx->name, vb->index, vbuf->field); return -EINVAL; } if (!frame->prepared) { /* get memory addresses */ frame->vaddr = vb2_plane_vaddr(&vbuf->vb2_buf, 0); frame->paddr = vb2_dma_contig_plane_dma_addr( &vbuf->vb2_buf, 0); frame->info = ctx->frameinfo; frame->prepared = true; dev_dbg(dev, "%s frame[%d] prepared; virt=%p, phy=%pad\n", ctx->name, vb->index, frame->vaddr, &frame->paddr); } } else { struct hva_stream *stream = to_hva_stream(vbuf); if (!stream->prepared) { /* get memory addresses */ stream->vaddr = vb2_plane_vaddr(&vbuf->vb2_buf, 0); stream->paddr = vb2_dma_contig_plane_dma_addr( &vbuf->vb2_buf, 0); stream->size = vb2_plane_size(&vbuf->vb2_buf, 0); stream->prepared = true; dev_dbg(dev, "%s stream[%d] prepared; virt=%p, phy=%pad\n", ctx->name, vb->index, stream->vaddr, &stream->paddr); } } return 0; }
static int hva_try_fmt_frame(struct file *file, void *priv, struct v4l2_format *f) { struct hva_ctx *ctx = fh_to_ctx(file->private_data); struct device *dev = ctx_to_dev(ctx); struct v4l2_pix_format *pix = &f->fmt.pix; u32 pixelformat = pix->pixelformat; const struct hva_enc *enc; u32 width, height; enc = hva_find_encoder(ctx, pixelformat, ctx->streaminfo.streamformat); if (!enc) { dev_dbg(dev, "%s V4L2 TRY_FMT (OUTPUT): unsupported format %.4s\n", ctx->name, (char *)&pixelformat); return -EINVAL; } /* adjust width & height */ width = pix->width; height = pix->height; v4l_bound_align_image(&pix->width, HVA_MIN_WIDTH, HVA_MAX_WIDTH, frame_alignment(pixelformat) - 1, &pix->height, HVA_MIN_HEIGHT, HVA_MAX_HEIGHT, frame_alignment(pixelformat) - 1, 0); if ((pix->width != width) || (pix->height != height)) dev_dbg(dev, "%s V4L2 TRY_FMT (OUTPUT): resolution updated %dx%d -> %dx%d to fit min/max/alignment\n", ctx->name, width, height, pix->width, pix->height); width = ALIGN(pix->width, HVA_WIDTH_ALIGNMENT); height = ALIGN(pix->height, HVA_HEIGHT_ALIGNMENT); if (!pix->colorspace) { pix->colorspace = V4L2_COLORSPACE_REC709; pix->xfer_func = V4L2_XFER_FUNC_DEFAULT; pix->ycbcr_enc = V4L2_YCBCR_ENC_DEFAULT; pix->quantization = V4L2_QUANTIZATION_DEFAULT; } pix->bytesperline = frame_stride(width, pixelformat); pix->sizeimage = frame_size(width, height, pixelformat); pix->field = V4L2_FIELD_NONE; return 0; }
static void hva_stop_streaming(struct vb2_queue *vq) { struct hva_ctx *ctx = vb2_get_drv_priv(vq); struct hva_dev *hva = ctx_to_hdev(ctx); struct device *dev = ctx_to_dev(ctx); const struct hva_enc *enc = ctx->enc; struct vb2_v4l2_buffer *vbuf; dev_dbg(dev, "%s %s stop streaming\n", ctx->name, to_type_str(vq->type)); if (vq->type == V4L2_BUF_TYPE_VIDEO_OUTPUT) { /* return of all pending buffers to vb2 (in error state) */ ctx->frame_num = 0; while ((vbuf = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx))) v4l2_m2m_buf_done(vbuf, VB2_BUF_STATE_ERROR); } else { /* return of all pending buffers to vb2 (in error state) */ ctx->stream_num = 0; while ((vbuf = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx))) v4l2_m2m_buf_done(vbuf, VB2_BUF_STATE_ERROR); } if ((V4L2_TYPE_IS_OUTPUT(vq->type) && vb2_is_streaming(&ctx->fh.m2m_ctx->cap_q_ctx.q)) || (!V4L2_TYPE_IS_OUTPUT(vq->type) && vb2_is_streaming(&ctx->fh.m2m_ctx->out_q_ctx.q))) { dev_dbg(dev, "%s %s out=%d cap=%d\n", ctx->name, to_type_str(vq->type), vb2_is_streaming(&ctx->fh.m2m_ctx->out_q_ctx.q), vb2_is_streaming(&ctx->fh.m2m_ctx->cap_q_ctx.q)); return; } /* close encoder when both stop_streaming have been called */ if (enc) { dev_dbg(dev, "%s %s encoder closed\n", ctx->name, enc->name); enc->close(ctx); ctx->enc = NULL; /* clear instance context in instances array */ hva->instances[ctx->id] = NULL; hva->nb_of_instances--; } ctx->aborting = false; }
void hva_dbg_perf_end(struct hva_ctx *ctx, struct hva_stream *stream) { struct device *dev = ctx_to_dev(ctx); u64 div; u32 duration; u32 bytesused; u32 timestamp; struct hva_ctx_dbg *dbg = &ctx->dbg; ktime_t end = ktime_get(); /* stream bytesused and timestamp in us */ bytesused = vb2_get_plane_payload(&stream->vbuf.vb2_buf, 0); div = stream->vbuf.vb2_buf.timestamp; do_div(div, 1000); timestamp = (u32)div; /* encoding duration */ div = (u64)ktime_us_delta(end, dbg->begin); dev_dbg(dev, "%s perf stream[%d] dts=%d encoded using %d bytes in %d us", ctx->name, stream->vbuf.sequence, timestamp, bytesused, (u32)div); do_div(div, 100); duration = (u32)div; dbg->min_duration = min(duration, dbg->min_duration); dbg->max_duration = max(duration, dbg->max_duration); dbg->total_duration += duration; dbg->cnt_duration++; /* * the average bitrate is based on the total stream size * and the total encoding periods */ dbg->total_stream_size += bytesused; dbg->window_stream_size += bytesused; dbg->is_valid_period = true; }
static void hva_dbg_summary(struct hva_ctx *ctx) { struct device *dev = ctx_to_dev(ctx); struct hva_streaminfo *stream = &ctx->streaminfo; struct hva_frameinfo *frame = &ctx->frameinfo; if (!(ctx->flags & HVA_FLAG_STREAMINFO)) return; dev_dbg(dev, "%s %4.4s %dx%d > %4.4s %dx%d %s %s: %d frames encoded, %d system errors, %d encoding errors, %d frame errors\n", ctx->name, (char *)&frame->pixelformat, frame->aligned_width, frame->aligned_height, (char *)&stream->streamformat, stream->width, stream->height, stream->profile, stream->level, ctx->encoded_frames, ctx->sys_errors, ctx->encode_errors, ctx->frame_errors); }
/*
 * hva_open_encoder - find and open an encoder for the given format pair.
 * @ctx:          instance context
 * @streamformat: compressed output fourcc
 * @pixelformat:  raw input fourcc
 * @penc:         on success, receives the opened encoder
 *
 * Returns 0 on success, -EINVAL when no encoder matches, or the error
 * reported by the encoder's open() hook.
 */
static int hva_open_encoder(struct hva_ctx *ctx, u32 streamformat,
			    u32 pixelformat, struct hva_enc **penc)
{
	struct hva_dev *hva = ctx_to_hdev(ctx);
	struct device *dev = ctx_to_dev(ctx);
	struct hva_enc *enc;
	int err;

	/* find an encoder which can deal with these formats */
	enc = (struct hva_enc *)hva_find_encoder(ctx, pixelformat,
						 streamformat);
	if (!enc) {
		dev_err(dev, "%s no encoder found matching %4.4s => %4.4s\n",
			ctx->name, (char *)&pixelformat,
			(char *)&streamformat);
		return -EINVAL;
	}

	dev_dbg(dev, "%s one encoder matching %4.4s => %4.4s\n",
		ctx->name, (char *)&pixelformat, (char *)&streamformat);

	/* update instance name */
	snprintf(ctx->name, sizeof(ctx->name), "[%3d:%4.4s]",
		 hva->instance_id, (char *)&streamformat);

	/* open encoder instance */
	err = enc->open(ctx);
	if (err) {
		dev_err(dev, "%s failed to open encoder instance (%d)\n",
			ctx->name, err);
		return err;
	}

	dev_dbg(dev, "%s %s encoder opened\n", ctx->name, enc->name);

	*penc = enc;

	return 0;
}
static int hva_queue_setup(struct vb2_queue *vq, unsigned int *num_buffers, unsigned int *num_planes, unsigned int sizes[], struct device *alloc_devs[]) { struct hva_ctx *ctx = vb2_get_drv_priv(vq); struct device *dev = ctx_to_dev(ctx); unsigned int size; dev_dbg(dev, "%s %s queue setup: num_buffers %d\n", ctx->name, to_type_str(vq->type), *num_buffers); size = vq->type == V4L2_BUF_TYPE_VIDEO_OUTPUT ? ctx->frameinfo.size : ctx->max_stream_size; if (*num_planes) return sizes[0] < size ? -EINVAL : 0; /* only one plane supported */ *num_planes = 1; sizes[0] = size; return 0; }
/*
 * hva_release - V4L2 file release handler: tear down one encoder instance.
 *
 * Closes the HW encoder if still open and frees its slot in the device
 * instances array, logs the instance summary, then releases the V4L2
 * resources in the required order (m2m context, control handler, file
 * handle, debugfs entry) before freeing the context.
 *
 * Always returns 0.
 */
static int hva_release(struct file *file)
{
	struct hva_ctx *ctx = fh_to_ctx(file->private_data);
	struct hva_dev *hva = ctx_to_hdev(ctx);
	struct device *dev = ctx_to_dev(ctx);
	const struct hva_enc *enc = ctx->enc;

	if (enc) {
		dev_dbg(dev, "%s %s encoder closed\n", ctx->name, enc->name);
		enc->close(ctx);
		ctx->enc = NULL;

		/* clear instance context in instances array */
		hva->instances[ctx->id] = NULL;
		hva->nb_of_instances--;
	}

	/* trace a summary of instance before closing (debug purpose) */
	hva_dbg_summary(ctx);

	v4l2_m2m_ctx_release(ctx->fh.m2m_ctx);

	v4l2_ctrl_handler_free(&ctx->ctrl_handler);

	v4l2_fh_del(&ctx->fh);
	v4l2_fh_exit(&ctx->fh);

#ifdef CONFIG_VIDEO_STI_HVA_DEBUGFS
	/* remove the per-instance debugfs entry last-but-one */
	hva_dbg_ctx_remove(ctx);
#endif

	dev_info(dev, "%s encoder instance released\n", ctx->name);

	/* the context must not be touched after this point */
	kfree(ctx);

	return 0;
}
static int hva_s_fmt_frame(struct file *file, void *fh, struct v4l2_format *f) { struct hva_ctx *ctx = fh_to_ctx(file->private_data); struct device *dev = ctx_to_dev(ctx); struct v4l2_pix_format *pix = &f->fmt.pix; struct vb2_queue *vq; int ret; ret = hva_try_fmt_frame(file, fh, f); if (ret) { dev_dbg(dev, "%s V4L2 S_FMT (OUTPUT): unsupported format %.4s\n", ctx->name, (char *)&pix->pixelformat); return ret; } vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type); if (vb2_is_streaming(vq)) { dev_dbg(dev, "%s V4L2 S_FMT (OUTPUT): queue busy\n", ctx->name); return -EBUSY; } ctx->colorspace = pix->colorspace; ctx->xfer_func = pix->xfer_func; ctx->ycbcr_enc = pix->ycbcr_enc; ctx->quantization = pix->quantization; ctx->frameinfo.aligned_width = ALIGN(pix->width, HVA_WIDTH_ALIGNMENT); ctx->frameinfo.aligned_height = ALIGN(pix->height, HVA_HEIGHT_ALIGNMENT); ctx->frameinfo.size = pix->sizeimage; ctx->frameinfo.pixelformat = pix->pixelformat; ctx->frameinfo.width = pix->width; ctx->frameinfo.height = pix->height; ctx->flags |= HVA_FLAG_FRAMEINFO; return 0; }
/*
 * hva_job_ready - m2m job_ready callback.
 *
 * A job can run only when at least one source (frame) buffer and one
 * destination (stream) buffer are queued and the instance is not aborting.
 * Returns 1 when ready, 0 otherwise.
 */
static int hva_job_ready(void *priv)
{
	struct hva_ctx *ctx = priv;
	struct device *dev = ctx_to_dev(ctx);
	struct v4l2_m2m_ctx *m2m_ctx = ctx->fh.m2m_ctx;

	if (!v4l2_m2m_num_src_bufs_ready(m2m_ctx)) {
		dev_dbg(dev, "%s job not ready: no frame buffers\n",
			ctx->name);
		return 0;
	}

	if (!v4l2_m2m_num_dst_bufs_ready(m2m_ctx)) {
		dev_dbg(dev, "%s job not ready: no stream buffers\n",
			ctx->name);
		return 0;
	}

	if (ctx->aborting) {
		dev_dbg(dev, "%s job not ready: aborting\n", ctx->name);
		return 0;
	}

	return 1;
}
/*
 * hva_start_streaming - vb2 start_streaming callback for both queues.
 *
 * The encoder is only opened once both queues are streaming: the first
 * queue to start simply returns 0; the second one claims a free slot in
 * the device instances array and opens the encoder. On failure, all
 * buffers already queued on this queue are handed back to vb2 in QUEUED
 * state, as required by the vb2 start_streaming contract.
 *
 * Returns 0 on success, -ENOMEM when all instance slots are taken, or the
 * error from hva_open_encoder().
 */
static int hva_start_streaming(struct vb2_queue *vq, unsigned int count)
{
	struct hva_ctx *ctx = vb2_get_drv_priv(vq);
	struct hva_dev *hva = ctx_to_hdev(ctx);
	struct device *dev = ctx_to_dev(ctx);
	struct vb2_v4l2_buffer *vbuf;
	int ret;
	unsigned int i;
	bool found = false;

	dev_dbg(dev, "%s %s start streaming\n", ctx->name,
		to_type_str(vq->type));

	/* open encoder when both start_streaming have been called */
	if (V4L2_TYPE_IS_OUTPUT(vq->type)) {
		if (!vb2_start_streaming_called(&ctx->fh.m2m_ctx->cap_q_ctx.q))
			return 0;
	} else {
		if (!vb2_start_streaming_called(&ctx->fh.m2m_ctx->out_q_ctx.q))
			return 0;
	}

	/* store the instance context in the instances array */
	for (i = 0; i < HVA_MAX_INSTANCES; i++) {
		if (!hva->instances[i]) {
			hva->instances[i] = ctx;
			/* save the context identifier in the context */
			ctx->id = i;
			found = true;
			break;
		}
	}

	if (!found) {
		dev_err(dev, "%s maximum instances reached\n", ctx->name);
		ret = -ENOMEM;
		goto err;
	}

	hva->nb_of_instances++;

	if (!ctx->enc) {
		ret = hva_open_encoder(ctx,
				       ctx->streaminfo.streamformat,
				       ctx->frameinfo.pixelformat,
				       &ctx->enc);
		if (ret < 0)
			goto err_ctx;
	}

	return 0;

err_ctx:
	/* undo the instance-slot reservation made above */
	hva->instances[ctx->id] = NULL;
	hva->nb_of_instances--;
err:
	if (vq->type == V4L2_BUF_TYPE_VIDEO_OUTPUT) {
		/* return of all pending buffers to vb2 (in queued state) */
		while ((vbuf = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx)))
			v4l2_m2m_buf_done(vbuf, VB2_BUF_STATE_QUEUED);
	} else {
		/* return of all pending buffers to vb2 (in queued state) */
		while ((vbuf = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx)))
			v4l2_m2m_buf_done(vbuf, VB2_BUF_STATE_QUEUED);
	}

	return ret;
}
static int hva_try_fmt_stream(struct file *file, void *priv, struct v4l2_format *f) { struct hva_ctx *ctx = fh_to_ctx(file->private_data); struct device *dev = ctx_to_dev(ctx); struct v4l2_pix_format *pix = &f->fmt.pix; u32 streamformat = pix->pixelformat; const struct hva_enc *enc; u32 width, height; u32 stream_size; enc = hva_find_encoder(ctx, ctx->frameinfo.pixelformat, streamformat); if (!enc) { dev_dbg(dev, "%s V4L2 TRY_FMT (CAPTURE): unsupported format %.4s\n", ctx->name, (char *)&pix->pixelformat); return -EINVAL; } width = pix->width; height = pix->height; if (ctx->flags & HVA_FLAG_FRAMEINFO) { /* * if the frame resolution is already fixed, only allow the * same stream resolution */ pix->width = ctx->frameinfo.width; pix->height = ctx->frameinfo.height; if ((pix->width != width) || (pix->height != height)) dev_dbg(dev, "%s V4L2 TRY_FMT (CAPTURE): resolution updated %dx%d -> %dx%d to fit frame resolution\n", ctx->name, width, height, pix->width, pix->height); } else { /* adjust width & height */ v4l_bound_align_image(&pix->width, HVA_MIN_WIDTH, enc->max_width, 0, &pix->height, HVA_MIN_HEIGHT, enc->max_height, 0, 0); if ((pix->width != width) || (pix->height != height)) dev_dbg(dev, "%s V4L2 TRY_FMT (CAPTURE): resolution updated %dx%d -> %dx%d to fit min/max/alignment\n", ctx->name, width, height, pix->width, pix->height); } stream_size = estimated_stream_size(pix->width, pix->height); if (pix->sizeimage < stream_size) pix->sizeimage = stream_size; pix->bytesperline = 0; pix->colorspace = ctx->colorspace; pix->xfer_func = ctx->xfer_func; pix->ycbcr_enc = ctx->ycbcr_enc; pix->quantization = ctx->quantization; pix->field = V4L2_FIELD_NONE; return 0; }
/*
 * hva_ctrls_setup - register the V4L2 controls supported by the encoder.
 *
 * Registers the MPEG/H.264 encoding controls (bitrate mode and value, GOP
 * size, aspect, profile/level, entropy mode, CPB size, 8x8 transform,
 * QP min/max, VUI SAR, SEI frame packing) with their ranges and defaults.
 * Menu controls use a skip mask built as ~(bits of the allowed entries).
 *
 * Returns 0 on success, or the control handler error (handler freed).
 */
static int hva_ctrls_setup(struct hva_ctx *ctx)
{
	struct device *dev = ctx_to_dev(ctx);
	u64 mask;
	enum v4l2_mpeg_video_h264_sei_fp_arrangement_type sei_fp_type =
		V4L2_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE_TOP_BOTTOM;

	/* 15 = number of controls registered below */
	v4l2_ctrl_handler_init(&ctx->ctrl_handler, 15);

	/* only CBR rate control is exposed */
	v4l2_ctrl_new_std_menu(&ctx->ctrl_handler, &hva_ctrl_ops,
			       V4L2_CID_MPEG_VIDEO_BITRATE_MODE,
			       V4L2_MPEG_VIDEO_BITRATE_MODE_CBR,
			       0,
			       V4L2_MPEG_VIDEO_BITRATE_MODE_CBR);

	v4l2_ctrl_new_std(&ctx->ctrl_handler, &hva_ctrl_ops,
			  V4L2_CID_MPEG_VIDEO_GOP_SIZE,
			  1, 60, 1, 16);

	v4l2_ctrl_new_std(&ctx->ctrl_handler, &hva_ctrl_ops,
			  V4L2_CID_MPEG_VIDEO_BITRATE,
			  1000, 60000000, 1000, 20000000);

	/* only square pixel aspect is allowed */
	mask = ~(1 << V4L2_MPEG_VIDEO_ASPECT_1x1);
	v4l2_ctrl_new_std_menu(&ctx->ctrl_handler, &hva_ctrl_ops,
			       V4L2_CID_MPEG_VIDEO_ASPECT,
			       V4L2_MPEG_VIDEO_ASPECT_1x1,
			       mask,
			       V4L2_MPEG_VIDEO_ASPECT_1x1);

	/* baseline, main, high and stereo-high profiles are supported */
	mask = ~((1 << V4L2_MPEG_VIDEO_H264_PROFILE_BASELINE) |
		 (1 << V4L2_MPEG_VIDEO_H264_PROFILE_MAIN) |
		 (1 << V4L2_MPEG_VIDEO_H264_PROFILE_HIGH) |
		 (1 << V4L2_MPEG_VIDEO_H264_PROFILE_STEREO_HIGH));
	v4l2_ctrl_new_std_menu(&ctx->ctrl_handler, &hva_ctrl_ops,
			       V4L2_CID_MPEG_VIDEO_H264_PROFILE,
			       V4L2_MPEG_VIDEO_H264_PROFILE_STEREO_HIGH,
			       mask,
			       V4L2_MPEG_VIDEO_H264_PROFILE_HIGH);

	v4l2_ctrl_new_std_menu(&ctx->ctrl_handler, &hva_ctrl_ops,
			       V4L2_CID_MPEG_VIDEO_H264_LEVEL,
			       V4L2_MPEG_VIDEO_H264_LEVEL_4_2,
			       0,
			       V4L2_MPEG_VIDEO_H264_LEVEL_4_0);

	v4l2_ctrl_new_std_menu(&ctx->ctrl_handler, &hva_ctrl_ops,
			       V4L2_CID_MPEG_VIDEO_H264_ENTROPY_MODE,
			       V4L2_MPEG_VIDEO_H264_ENTROPY_MODE_CABAC,
			       0,
			       V4L2_MPEG_VIDEO_H264_ENTROPY_MODE_CAVLC);

	v4l2_ctrl_new_std(&ctx->ctrl_handler, &hva_ctrl_ops,
			  V4L2_CID_MPEG_VIDEO_H264_CPB_SIZE,
			  1, 10000, 1, 3000);

	v4l2_ctrl_new_std(&ctx->ctrl_handler, &hva_ctrl_ops,
			  V4L2_CID_MPEG_VIDEO_H264_8X8_TRANSFORM,
			  0, 1, 1, 0);

	v4l2_ctrl_new_std(&ctx->ctrl_handler, &hva_ctrl_ops,
			  V4L2_CID_MPEG_VIDEO_H264_MIN_QP,
			  0, 51, 1, 5);

	v4l2_ctrl_new_std(&ctx->ctrl_handler, &hva_ctrl_ops,
			  V4L2_CID_MPEG_VIDEO_H264_MAX_QP,
			  0, 51, 1, 51);

	v4l2_ctrl_new_std(&ctx->ctrl_handler, &hva_ctrl_ops,
			  V4L2_CID_MPEG_VIDEO_H264_VUI_SAR_ENABLE,
			  0, 1, 1, 1);

	/* only the 1:1 sample aspect ratio IDC is allowed */
	mask = ~(1 << V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_1x1);
	v4l2_ctrl_new_std_menu(&ctx->ctrl_handler, &hva_ctrl_ops,
			       V4L2_CID_MPEG_VIDEO_H264_VUI_SAR_IDC,
			       V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_1x1,
			       mask,
			       V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_1x1);

	v4l2_ctrl_new_std(&ctx->ctrl_handler, &hva_ctrl_ops,
			  V4L2_CID_MPEG_VIDEO_H264_SEI_FRAME_PACKING,
			  0, 1, 1, 0);

	/* only the top-bottom frame packing arrangement is allowed */
	mask = ~(1 << sei_fp_type);
	v4l2_ctrl_new_std_menu(&ctx->ctrl_handler, &hva_ctrl_ops,
			       V4L2_CID_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE,
			       sei_fp_type,
			       mask,
			       sei_fp_type);

	if (ctx->ctrl_handler.error) {
		int err = ctx->ctrl_handler.error;

		dev_dbg(dev, "%s controls setup failed (%d)\n",
			ctx->name, err);
		v4l2_ctrl_handler_free(&ctx->ctrl_handler);
		return err;
	}

	/* push the default values to the driver via hva_s_ctrl() */
	v4l2_ctrl_handler_setup(&ctx->ctrl_handler);

	/* set default time per frame */
	ctx->ctrls.time_per_frame.numerator = HVA_DEFAULT_FRAME_NUM;
	ctx->ctrls.time_per_frame.denominator = HVA_DEFAULT_FRAME_DEN;

	return 0;
}
/*
 * hva_s_ctrl - s_ctrl hook of hva_ctrl_ops.
 *
 * Copies the new control value into the per-instance ctrls cache; for the
 * H.264 profile and level controls it also refreshes the human-readable
 * strings used by the debug summary (once stream info is known).
 *
 * Returns 0 on success or -EINVAL for an unknown control id.
 */
static int hva_s_ctrl(struct v4l2_ctrl *ctrl)
{
	struct hva_ctx *ctx = container_of(ctrl->handler, struct hva_ctx,
					   ctrl_handler);
	struct device *dev = ctx_to_dev(ctx);

	dev_dbg(dev, "%s S_CTRL: id = %d, val = %d\n", ctx->name,
		ctrl->id, ctrl->val);

	switch (ctrl->id) {
	case V4L2_CID_MPEG_VIDEO_BITRATE_MODE:
		ctx->ctrls.bitrate_mode = ctrl->val;
		break;
	case V4L2_CID_MPEG_VIDEO_GOP_SIZE:
		ctx->ctrls.gop_size = ctrl->val;
		break;
	case V4L2_CID_MPEG_VIDEO_BITRATE:
		ctx->ctrls.bitrate = ctrl->val;
		break;
	case V4L2_CID_MPEG_VIDEO_ASPECT:
		ctx->ctrls.aspect = ctrl->val;
		break;
	case V4L2_CID_MPEG_VIDEO_H264_PROFILE:
		ctx->ctrls.profile = ctrl->val;
		/* keep the debug-summary profile string in sync */
		if (ctx->flags & HVA_FLAG_STREAMINFO)
			snprintf(ctx->streaminfo.profile,
				 sizeof(ctx->streaminfo.profile),
				 "%s profile",
				 v4l2_ctrl_get_menu(ctrl->id)[ctrl->val]);
		break;
	case V4L2_CID_MPEG_VIDEO_H264_LEVEL:
		ctx->ctrls.level = ctrl->val;
		/* keep the debug-summary level string in sync */
		if (ctx->flags & HVA_FLAG_STREAMINFO)
			snprintf(ctx->streaminfo.level,
				 sizeof(ctx->streaminfo.level),
				 "level %s",
				 v4l2_ctrl_get_menu(ctrl->id)[ctrl->val]);
		break;
	case V4L2_CID_MPEG_VIDEO_H264_ENTROPY_MODE:
		ctx->ctrls.entropy_mode = ctrl->val;
		break;
	case V4L2_CID_MPEG_VIDEO_H264_CPB_SIZE:
		ctx->ctrls.cpb_size = ctrl->val;
		break;
	case V4L2_CID_MPEG_VIDEO_H264_8X8_TRANSFORM:
		ctx->ctrls.dct8x8 = ctrl->val;
		break;
	case V4L2_CID_MPEG_VIDEO_H264_MIN_QP:
		ctx->ctrls.qpmin = ctrl->val;
		break;
	case V4L2_CID_MPEG_VIDEO_H264_MAX_QP:
		ctx->ctrls.qpmax = ctrl->val;
		break;
	case V4L2_CID_MPEG_VIDEO_H264_VUI_SAR_ENABLE:
		ctx->ctrls.vui_sar = ctrl->val;
		break;
	case V4L2_CID_MPEG_VIDEO_H264_VUI_SAR_IDC:
		ctx->ctrls.vui_sar_idc = ctrl->val;
		break;
	case V4L2_CID_MPEG_VIDEO_H264_SEI_FRAME_PACKING:
		ctx->ctrls.sei_fp = ctrl->val;
		break;
	case V4L2_CID_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE:
		ctx->ctrls.sei_fp_type = ctrl->val;
		break;
	default:
		dev_dbg(dev, "%s S_CTRL: invalid control (id = %d)\n",
			ctx->name, ctrl->id);
		return -EINVAL;
	}

	return 0;
}