/* Initialize the MFX VPP input/output parameters from the filter's
 * negotiated state: frame geometry and alignment, output format override,
 * deinterlacing and frame rate conversion.  Ends by calling
 * configure_filters() to apply the per-filter ext buffers. */
static void
init_params (GstMfxFilter * filter)
{
  gdouble frame_rate;

  filter->params.vpp.In = filter->frame_info;
  /* Aligned frame dimensions may differ between input and output surfaces
   * so we sanitize the input frame dimensions, since output frame dimensions
   * could have certain alignment requirements used in HEVC HW encoding */
  if (filter->shared_request[1]) {
    filter->params.vpp.In.Width = GST_ROUND_UP_16 (filter->frame_info.CropW);
    /* Interlaced surfaces need 32-row alignment; progressive only 16 */
    filter->params.vpp.In.Height =
        (MFX_PICSTRUCT_PROGRESSIVE == filter->frame_info.PicStruct) ?
        GST_ROUND_UP_16 (filter->frame_info.CropH) :
        GST_ROUND_UP_32 (filter->frame_info.CropH);
  }

  filter->params.vpp.Out = filter->frame_info;

  /* Apply explicit output overrides only when they were requested */
  if (filter->fourcc)
    filter->params.vpp.Out.FourCC = filter->fourcc;
  if (filter->width) {
    filter->params.vpp.Out.CropW = filter->width;
    filter->params.vpp.Out.Width = GST_ROUND_UP_16 (filter->width);
  }
  if (filter->height) {
    filter->params.vpp.Out.CropH = filter->height;
    filter->params.vpp.Out.Height =
        (MFX_PICSTRUCT_PROGRESSIVE == filter->frame_info.PicStruct) ?
        GST_ROUND_UP_16 (filter->height) : GST_ROUND_UP_32 (filter->height);
  }

  if (filter->filter_op & GST_MFX_FILTER_DEINTERLACING) {
    /* Setup special double frame rate deinterlace mode */
    gst_util_fraction_to_double (filter->params.vpp.In.FrameRateExtN,
        filter->params.vpp.In.FrameRateExtD, &frame_rate);
    /* For ~60 fps interlaced (field) content, halve the input rate so the
     * VPP's rate doubling yields 60p on output */
    if ((filter->frame_info.PicStruct == MFX_PICSTRUCT_FIELD_TFF ||
            filter->frame_info.PicStruct == MFX_PICSTRUCT_FIELD_BFF) &&
        (int) (frame_rate + 0.5) == 60)
      filter->params.vpp.In.FrameRateExtN /= 2;
    filter->params.vpp.Out.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;
  }

  /* Frame rate conversion only when both target fps terms are non-zero */
  if (filter->filter_op & GST_MFX_FILTER_FRAMERATE_CONVERSION &&
      (filter->fps_n && filter->fps_d)) {
    filter->params.vpp.Out.FrameRateExtN = filter->fps_n;
    filter->params.vpp.Out.FrameRateExtD = filter->fps_d;
  }

  configure_filters (filter);
}
/* Populate filter->frame_info (mfxFrameInfo) from a GstVideoInfo:
 * format, picture structure, crop rectangle, frame rate, PAR, bit depth
 * and the 16/32-aligned surface dimensions MSDK requires. */
void
gst_mfx_filter_set_frame_info_from_gst_video_info (GstMfxFilter * filter,
    const GstVideoInfo * info)
{
  g_return_if_fail (filter != NULL);

  filter->frame_info.ChromaFormat = MFX_CHROMAFORMAT_YUV420;
  filter->frame_info.FourCC =
      gst_video_format_to_mfx_fourcc (GST_VIDEO_INFO_FORMAT (info));
  filter->frame_info.PicStruct =
      GST_VIDEO_INFO_IS_INTERLACED (info) ? (GST_VIDEO_INFO_FLAG_IS_SET (info,
          GST_VIDEO_FRAME_FLAG_TFF) ? MFX_PICSTRUCT_FIELD_TFF :
      MFX_PICSTRUCT_FIELD_BFF) : MFX_PICSTRUCT_PROGRESSIVE;

  filter->frame_info.CropX = 0;
  filter->frame_info.CropY = 0;
  filter->frame_info.CropW = info->width;
  filter->frame_info.CropH = info->height;
  /* Fall back to 30/1 when upstream did not provide a frame rate.
   * FIX: the denominator previously was copied verbatim, so fps 0/0
   * became 30/0 — an invalid rate risking a division by zero in the
   * driver.  Substitute 1 whenever fps_d is 0. */
  filter->frame_info.FrameRateExtN = info->fps_n ? info->fps_n : 30;
  filter->frame_info.FrameRateExtD = info->fps_d ? info->fps_d : 1;
  filter->frame_info.AspectRatioW = info->par_n;
  filter->frame_info.AspectRatioH = info->par_d;
  /* 8-bit only here; 10-bit formats would need these raised — the ChromaFormat
   * above is likewise hard-coded to 4:2:0 */
  filter->frame_info.BitDepthChroma = 8;
  filter->frame_info.BitDepthLuma = 8;

  /* MSDK surface dimensions: width 16-aligned; height 16-aligned for
   * progressive, 32-aligned for interlaced content */
  filter->frame_info.Width = GST_ROUND_UP_16 (info->width);
  filter->frame_info.Height =
      (MFX_PICSTRUCT_PROGRESSIVE == filter->frame_info.PicStruct) ?
      GST_ROUND_UP_16 (info->height) : GST_ROUND_UP_32 (info->height);
}
/* Indirect RGB decode path: let libjpeg write raw component rows into the
 * decoder's intermediate idr_* buffers, then interleave them pixel by pixel
 * into the (possibly packed) video frame using the frame's pixel stride. */
static void
gst_jpeg_dec_decode_rgb (GstJpegDec * dec, GstVideoFrame * frame)
{
  guchar *r_rows[16], *g_rows[16], *b_rows[16];
  guchar **scanarray[3] = { r_rows, g_rows, b_rows };
  gint i, j, k;
  gint lines;
  guint8 *base[3];
  guint pstride, rstride;
  gint width, height;

  GST_DEBUG_OBJECT (dec, "indirect decoding of RGB");

  width = GST_VIDEO_FRAME_WIDTH (frame);
  height = GST_VIDEO_FRAME_HEIGHT (frame);

  if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
    return;

  for (i = 0; i < 3; i++)
    base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);

  pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
  rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);

  /* idr_y/u/v are reused as the R/G/B scratch row buffers here */
  memcpy (r_rows, dec->idr_y, 16 * sizeof (gpointer));
  memcpy (g_rows, dec->idr_u, 16 * sizeof (gpointer));
  memcpy (b_rows, dec->idr_v, 16 * sizeof (gpointer));

  i = 0;
  while (i < height) {
    lines = jpeg_read_raw_data (&dec->cinfo, scanarray, DCTSIZE);
    if (G_LIKELY (lines > 0)) {
      for (j = 0; (j < DCTSIZE) && (i < height); j++, i++) {
        gint p;

        p = 0;
        /* interleave the planar scratch rows into the packed destination */
        for (k = 0; k < width; k++) {
          base[0][p] = r_rows[j][k];
          base[1][p] = g_rows[j][k];
          base[2][p] = b_rows[j][k];
          p += pstride;
        }
        base[0] += rstride;
        base[1] += rstride;
        base[2] += rstride;
      }
    } else {
      GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
      /* FIX: i is only advanced when lines were produced, so a persistent
       * 0 return (suspension / no data) previously spun this loop forever.
       * Bail out and leave the remaining rows untouched. */
      break;
    }
  }
}
/* Compute per-plane stride, offset and total buffer size for the video
 * format described by @info, using the library's default (unpadded,
 * mostly 4-byte-aligned-stride) layout.  Always returns 0; unknown
 * formats only log an error.  Chroma heights for vertically subsampled
 * formats are rounded up to an even row count when the info is
 * interlaced, so each field gets whole chroma rows. */
static int
fill_planes (GstVideoInfo * info)
{
  gsize width, height, cr_h;

  width = (gsize) info->width;
  height = (gsize) info->height;

  switch (info->finfo->format) {
      /* packed 4:2:2, 2 bytes/pixel */
    case GST_VIDEO_FORMAT_YUY2:
    case GST_VIDEO_FORMAT_YVYU:
    case GST_VIDEO_FORMAT_UYVY:
      info->stride[0] = GST_ROUND_UP_4 (width * 2);
      info->offset[0] = 0;
      info->size = info->stride[0] * height;
      break;
      /* packed 4 bytes/pixel (incl. r210: 10-bit RGB packed in 32 bits) */
    case GST_VIDEO_FORMAT_AYUV:
    case GST_VIDEO_FORMAT_RGBx:
    case GST_VIDEO_FORMAT_RGBA:
    case GST_VIDEO_FORMAT_BGRx:
    case GST_VIDEO_FORMAT_BGRA:
    case GST_VIDEO_FORMAT_xRGB:
    case GST_VIDEO_FORMAT_ARGB:
    case GST_VIDEO_FORMAT_xBGR:
    case GST_VIDEO_FORMAT_ABGR:
    case GST_VIDEO_FORMAT_r210:
      info->stride[0] = width * 4;
      info->offset[0] = 0;
      info->size = info->stride[0] * height;
      break;
      /* packed 15/16-bit RGB, 2 bytes/pixel */
    case GST_VIDEO_FORMAT_RGB16:
    case GST_VIDEO_FORMAT_BGR16:
    case GST_VIDEO_FORMAT_RGB15:
    case GST_VIDEO_FORMAT_BGR15:
      info->stride[0] = GST_ROUND_UP_4 (width * 2);
      info->offset[0] = 0;
      info->size = info->stride[0] * height;
      break;
      /* packed 3 bytes/pixel */
    case GST_VIDEO_FORMAT_RGB:
    case GST_VIDEO_FORMAT_BGR:
    case GST_VIDEO_FORMAT_v308:
      info->stride[0] = GST_ROUND_UP_4 (width * 3);
      info->offset[0] = 0;
      info->size = info->stride[0] * height;
      break;
      /* v210: 6 pixels in 16 bytes, rows padded to 48-pixel groups */
    case GST_VIDEO_FORMAT_v210:
      info->stride[0] = ((width + 47) / 48) * 128;
      info->offset[0] = 0;
      info->size = info->stride[0] * height;
      break;
      /* v216: packed 16-bit 4:2:2 */
    case GST_VIDEO_FORMAT_v216:
      info->stride[0] = GST_ROUND_UP_8 (width * 4);
      info->offset[0] = 0;
      info->size = info->stride[0] * height;
      break;
    case GST_VIDEO_FORMAT_GRAY8:
      info->stride[0] = GST_ROUND_UP_4 (width);
      info->offset[0] = 0;
      info->size = info->stride[0] * height;
      break;
    case GST_VIDEO_FORMAT_GRAY16_BE:
    case GST_VIDEO_FORMAT_GRAY16_LE:
      info->stride[0] = GST_ROUND_UP_4 (width * 2);
      info->offset[0] = 0;
      info->size = info->stride[0] * height;
      break;
      /* UYVP: 10-bit packed 4:2:2, 2.5 bytes/pixel */
    case GST_VIDEO_FORMAT_UYVP:
      info->stride[0] = GST_ROUND_UP_4 ((width * 2 * 5 + 3) / 4);
      info->offset[0] = 0;
      info->size = info->stride[0] * height;
      break;
      /* 8-bit paletted: plane 1 is the 256-entry RGBA palette */
    case GST_VIDEO_FORMAT_RGB8P:
      info->stride[0] = GST_ROUND_UP_4 (width);
      info->stride[1] = 4;
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * height;
      info->size = info->offset[1] + (4 * 256);
      break;
      /* IYU1: packed 4:1:1, 1.5 bytes/pixel */
    case GST_VIDEO_FORMAT_IYU1:
      info->stride[0] = GST_ROUND_UP_4 (GST_ROUND_UP_4 (width) +
          GST_ROUND_UP_4 (width) / 2);
      info->offset[0] = 0;
      info->size = info->stride[0] * height;
      break;
      /* packed 16-bit-per-component, 8 bytes/pixel */
    case GST_VIDEO_FORMAT_ARGB64:
    case GST_VIDEO_FORMAT_AYUV64:
      info->stride[0] = width * 8;
      info->offset[0] = 0;
      info->size = info->stride[0] * height;
      break;
      /* planar 4:2:0 */
    case GST_VIDEO_FORMAT_I420:
    case GST_VIDEO_FORMAT_YV12:        /* same as I420, but plane 1+2 swapped */
      info->stride[0] = GST_ROUND_UP_4 (width);
      info->stride[1] = GST_ROUND_UP_4 (GST_ROUND_UP_2 (width) / 2);
      info->stride[2] = info->stride[1];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * GST_ROUND_UP_2 (height);
      cr_h = GST_ROUND_UP_2 (height) / 2;
      if (GST_VIDEO_INFO_IS_INTERLACED (info))
        cr_h = GST_ROUND_UP_2 (cr_h);
      info->offset[2] = info->offset[1] + info->stride[1] * cr_h;
      info->size = info->offset[2] + info->stride[2] * cr_h;
      break;
      /* planar 4:1:1 */
    case GST_VIDEO_FORMAT_Y41B:
      info->stride[0] = GST_ROUND_UP_4 (width);
      info->stride[1] = GST_ROUND_UP_16 (width) / 4;
      info->stride[2] = info->stride[1];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * height;
      info->offset[2] = info->offset[1] + info->stride[1] * height;
      /* simplification of ROUNDUP4(w)*h + 2*(ROUNDUP16(w)/4)*h */
      info->size = (info->stride[0] + (GST_ROUND_UP_16 (width) / 2)) * height;
      break;
      /* planar 4:2:2 */
    case GST_VIDEO_FORMAT_Y42B:
      info->stride[0] = GST_ROUND_UP_4 (width);
      info->stride[1] = GST_ROUND_UP_8 (width) / 2;
      info->stride[2] = info->stride[1];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * height;
      info->offset[2] = info->offset[1] + info->stride[1] * height;
      /* simplification of ROUNDUP4(w)*h + 2*(ROUNDUP8(w)/2)*h */
      info->size = (info->stride[0] + GST_ROUND_UP_8 (width)) * height;
      break;
      /* planar 4:4:4, three equal planes */
    case GST_VIDEO_FORMAT_Y444:
    case GST_VIDEO_FORMAT_GBR:
      info->stride[0] = GST_ROUND_UP_4 (width);
      info->stride[1] = info->stride[0];
      info->stride[2] = info->stride[0];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * height;
      info->offset[2] = info->offset[1] * 2;
      info->size = info->stride[0] * height * 3;
      break;
      /* semi-planar 4:2:0 (interleaved UV plane) */
    case GST_VIDEO_FORMAT_NV12:
    case GST_VIDEO_FORMAT_NV21:
      info->stride[0] = GST_ROUND_UP_4 (width);
      info->stride[1] = info->stride[0];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * GST_ROUND_UP_2 (height);
      cr_h = GST_ROUND_UP_2 (height) / 2;
      if (GST_VIDEO_INFO_IS_INTERLACED (info))
        cr_h = GST_ROUND_UP_2 (cr_h);
      info->size = info->offset[1] + info->stride[0] * cr_h;
      break;
      /* semi-planar 4:2:2 */
    case GST_VIDEO_FORMAT_NV16:
    case GST_VIDEO_FORMAT_NV61:
      info->stride[0] = GST_ROUND_UP_4 (width);
      info->stride[1] = info->stride[0];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * height;
      info->size = info->stride[0] * height * 2;
      break;
      /* semi-planar 4:4:4 (UV plane twice as wide) */
    case GST_VIDEO_FORMAT_NV24:
      info->stride[0] = GST_ROUND_UP_4 (width);
      info->stride[1] = GST_ROUND_UP_4 (width * 2);
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * height;
      info->size = info->stride[0] * height + info->stride[1] * height;
      break;
      /* planar 4:2:0 + full-size alpha plane */
    case GST_VIDEO_FORMAT_A420:
      info->stride[0] = GST_ROUND_UP_4 (width);
      info->stride[1] = GST_ROUND_UP_4 (GST_ROUND_UP_2 (width) / 2);
      info->stride[2] = info->stride[1];
      info->stride[3] = info->stride[0];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * GST_ROUND_UP_2 (height);
      cr_h = GST_ROUND_UP_2 (height) / 2;
      if (GST_VIDEO_INFO_IS_INTERLACED (info))
        cr_h = GST_ROUND_UP_2 (cr_h);
      info->offset[2] = info->offset[1] + info->stride[1] * cr_h;
      info->offset[3] = info->offset[2] + info->stride[2] * cr_h;
      info->size = info->offset[3] + info->stride[0] * GST_ROUND_UP_2 (height);
      break;
      /* planar 4:1:0 (chroma subsampled 4x in both directions) */
    case GST_VIDEO_FORMAT_YUV9:
    case GST_VIDEO_FORMAT_YVU9:
      info->stride[0] = GST_ROUND_UP_4 (width);
      info->stride[1] = GST_ROUND_UP_4 (GST_ROUND_UP_4 (width) / 4);
      info->stride[2] = info->stride[1];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * height;
      cr_h = GST_ROUND_UP_4 (height) / 4;
      if (GST_VIDEO_INFO_IS_INTERLACED (info))
        cr_h = GST_ROUND_UP_2 (cr_h);
      info->offset[2] = info->offset[1] + info->stride[1] * cr_h;
      info->size = info->offset[2] + info->stride[2] * cr_h;
      break;
      /* planar 10-bit 4:2:0 (2 bytes per sample) */
    case GST_VIDEO_FORMAT_I420_10LE:
    case GST_VIDEO_FORMAT_I420_10BE:
      info->stride[0] = GST_ROUND_UP_4 (width * 2);
      info->stride[1] = GST_ROUND_UP_4 (width);
      info->stride[2] = info->stride[1];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * GST_ROUND_UP_2 (height);
      cr_h = GST_ROUND_UP_2 (height) / 2;
      if (GST_VIDEO_INFO_IS_INTERLACED (info))
        cr_h = GST_ROUND_UP_2 (cr_h);
      info->offset[2] = info->offset[1] + info->stride[1] * cr_h;
      info->size = info->offset[2] + info->stride[2] * cr_h;
      break;
      /* planar 10-bit 4:2:2 */
    case GST_VIDEO_FORMAT_I422_10LE:
    case GST_VIDEO_FORMAT_I422_10BE:
      info->stride[0] = GST_ROUND_UP_4 (width * 2);
      info->stride[1] = GST_ROUND_UP_4 (width);
      info->stride[2] = info->stride[1];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * GST_ROUND_UP_2 (height);
      info->offset[2] = info->offset[1] +
          info->stride[1] * GST_ROUND_UP_2 (height);
      info->size = info->offset[2] + info->stride[2] * GST_ROUND_UP_2 (height);
      break;
      /* planar 10-bit 4:4:4 */
    case GST_VIDEO_FORMAT_Y444_10LE:
    case GST_VIDEO_FORMAT_Y444_10BE:
    case GST_VIDEO_FORMAT_GBR_10LE:
    case GST_VIDEO_FORMAT_GBR_10BE:
      info->stride[0] = GST_ROUND_UP_4 (width * 2);
      info->stride[1] = info->stride[0];
      info->stride[2] = info->stride[0];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * height;
      info->offset[2] = info->offset[1] * 2;
      info->size = info->stride[0] * height * 3;
      break;
      /* tiled NV12 variant: strides encode tile counts, not bytes */
    case GST_VIDEO_FORMAT_NV12_64Z32:
      info->stride[0] =
          GST_VIDEO_TILE_MAKE_STRIDE (GST_ROUND_UP_128 (width) / 64,
          GST_ROUND_UP_32 (height) / 32);
      info->stride[1] =
          GST_VIDEO_TILE_MAKE_STRIDE (GST_ROUND_UP_128 (width) / 64,
          GST_ROUND_UP_64 (height) / 64);
      info->offset[0] = 0;
      info->offset[1] = GST_ROUND_UP_128 (width) * GST_ROUND_UP_32 (height);
      info->size = info->offset[1] +
          GST_ROUND_UP_128 (width) * GST_ROUND_UP_64 (height) / 2;
      break;
      /* planar 10-bit 4:2:0 + alpha */
    case GST_VIDEO_FORMAT_A420_10LE:
    case GST_VIDEO_FORMAT_A420_10BE:
      info->stride[0] = GST_ROUND_UP_4 (width * 2);
      info->stride[1] = GST_ROUND_UP_4 (width);
      info->stride[2] = info->stride[1];
      info->stride[3] = info->stride[0];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * GST_ROUND_UP_2 (height);
      cr_h = GST_ROUND_UP_2 (height) / 2;
      if (GST_VIDEO_INFO_IS_INTERLACED (info))
        cr_h = GST_ROUND_UP_2 (cr_h);
      info->offset[2] = info->offset[1] + info->stride[1] * cr_h;
      info->offset[3] = info->offset[2] + info->stride[2] * cr_h;
      info->size = info->offset[3] + info->stride[0] * GST_ROUND_UP_2 (height);
      break;
      /* planar 10-bit 4:2:2 + alpha */
    case GST_VIDEO_FORMAT_A422_10LE:
    case GST_VIDEO_FORMAT_A422_10BE:
      info->stride[0] = GST_ROUND_UP_4 (width * 2);
      info->stride[1] = GST_ROUND_UP_4 (width);
      info->stride[2] = info->stride[1];
      info->stride[3] = info->stride[0];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * GST_ROUND_UP_2 (height);
      info->offset[2] = info->offset[1] +
          info->stride[1] * GST_ROUND_UP_2 (height);
      info->offset[3] = info->offset[2] +
          info->stride[2] * GST_ROUND_UP_2 (height);
      info->size = info->offset[3] + info->stride[0] * GST_ROUND_UP_2 (height);
      break;
      /* planar 10-bit 4:4:4 + alpha, four equal planes */
    case GST_VIDEO_FORMAT_A444_10LE:
    case GST_VIDEO_FORMAT_A444_10BE:
      info->stride[0] = GST_ROUND_UP_4 (width * 2);
      info->stride[1] = info->stride[0];
      info->stride[2] = info->stride[0];
      info->stride[3] = info->stride[0];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * height;
      info->offset[2] = info->offset[1] * 2;
      info->offset[3] = info->offset[1] * 3;
      info->size = info->stride[0] * height * 4;
      break;
    case GST_VIDEO_FORMAT_ENCODED:
      /* no plane layout for encoded data */
      break;
    case GST_VIDEO_FORMAT_UNKNOWN:
      GST_ERROR ("invalid format");
      g_warning ("invalid format");
      /* NOTE(review): still falls through to return 0; callers cannot
       * detect the failure from the return value */
      break;
  }
  return 0;
}
/* Indirect YUV decode path used when the frame layout is not directly
 * usable by jpeg_read_raw_data(): libjpeg writes into the decoder's
 * intermediate idr_* row buffers, which are then memcpy'd (or
 * horizontally resampled) into the video frame planes.
 *
 * @r_v / @r_h: vertical / horizontal sampling ratios of the luma plane;
 * @comp: number of components in the JPEG (1 == grayscale). */
static void
gst_jpeg_dec_decode_indirect (GstJpegDec * dec, GstVideoFrame * frame,
    gint r_v, gint r_h, gint comp)
{
  guchar *y_rows[16], *u_rows[16], *v_rows[16];
  guchar **scanarray[3] = { y_rows, u_rows, v_rows };
  gint i, j, k;
  gint lines;
  guchar *base[3], *last[3];
  gint stride[3];
  gint width, height;

  GST_DEBUG_OBJECT (dec,
      "unadvantageous width or r_h, taking slow route involving memcpy");

  width = GST_VIDEO_FRAME_WIDTH (frame);
  height = GST_VIDEO_FRAME_HEIGHT (frame);

  if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
    return;

  for (i = 0; i < 3; i++) {
    base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
    stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
    /* make sure we don't make jpeglib write beyond our buffer,
     * which might happen if (height % (r_v*DCTSIZE)) != 0 */
    last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
        (GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));
  }

  memcpy (y_rows, dec->idr_y, 16 * sizeof (gpointer));
  memcpy (u_rows, dec->idr_u, 16 * sizeof (gpointer));
  memcpy (v_rows, dec->idr_v, 16 * sizeof (gpointer));

  /* fill chroma components for grayscale */
  if (comp == 1) {
    GST_DEBUG_OBJECT (dec, "grayscale, filling chroma");
    for (i = 0; i < 16; i++) {
      /* FIX: memset() is (dest, value, count) — the original call had value
       * and count swapped, so it wrote only 0x80 bytes of the truncated
       * width value instead of filling the whole row with neutral chroma
       * 0x80, leaving grayscale output with corrupted color. */
      memset (u_rows[i], 0x80, GST_ROUND_UP_32 (width));
      memset (v_rows[i], 0x80, GST_ROUND_UP_32 (width));
    }
  }

  for (i = 0; i < height; i += r_v * DCTSIZE) {
    lines = jpeg_read_raw_data (&dec->cinfo, scanarray, r_v * DCTSIZE);
    if (G_LIKELY (lines > 0)) {
      for (j = 0, k = 0; j < (r_v * DCTSIZE); j += r_v, k++) {
        if (G_LIKELY (base[0] <= last[0])) {
          memcpy (base[0], y_rows[j], stride[0]);
          base[0] += stride[0];
        }
        if (r_v == 2) {
          /* two luma rows per chroma row */
          if (G_LIKELY (base[0] <= last[0])) {
            memcpy (base[0], y_rows[j + 1], stride[0]);
            base[0] += stride[0];
          }
        }
        if (G_LIKELY (base[1] <= last[1] && base[2] <= last[2])) {
          if (r_h == 2) {
            /* chroma already at destination width: plain copy */
            memcpy (base[1], u_rows[k], stride[1]);
            memcpy (base[2], v_rows[k], stride[2]);
          } else if (r_h == 1) {
            /* chroma at full width: downsample horizontally by 2 */
            hresamplecpy1 (base[1], u_rows[k], stride[1]);
            hresamplecpy1 (base[2], v_rows[k], stride[2]);
          } else {
            /* FIXME: implement (at least we avoid crashing by doing nothing) */
          }
        }

        /* advance chroma rows: every iteration for r_v==2, every other
         * iteration otherwise */
        if (r_v == 2 || (k & 1) != 0) {
          base[1] += stride[1];
          base[2] += stride[2];
        }
      }
    } else {
      GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
    }
  }
}
/* Negotiate and set the decoder's source (output) caps from the MSDK
 * frame parameters.  Display dimensions come from the crop rectangle;
 * when @need_allocation is TRUE, separate allocation caps carrying the
 * 16/32-aligned surface dimensions are computed and cached, otherwise
 * the previously cached allocation caps are reused to avoid pool
 * renegotiation.  Returns TRUE on success. */
static gboolean
gst_msdkdec_set_src_caps (GstMsdkDec * thiz, gboolean need_allocation)
{
  GstVideoCodecState *output_state;
  GstVideoInfo *vinfo;
  GstVideoAlignment align;
  GstCaps *allocation_caps = NULL;
  GstVideoFormat format;
  guint width, height;
  const gchar *format_str;

  /* use display width and display height in output state which
   * will be using for caps negotiation */
  width =
      thiz->param.mfx.FrameInfo.CropW ? thiz->param.mfx.
      FrameInfo.CropW : GST_VIDEO_INFO_WIDTH (&thiz->input_state->info);
  height =
      thiz->param.mfx.FrameInfo.CropH ? thiz->param.mfx.
      FrameInfo.CropH : GST_VIDEO_INFO_HEIGHT (&thiz->input_state->info);

  format =
      gst_msdk_get_video_format_from_mfx_fourcc (thiz->param.mfx.
      FrameInfo.FourCC);
  if (format == GST_VIDEO_FORMAT_UNKNOWN) {
    GST_WARNING_OBJECT (thiz, "Failed to find a valid video format\n");
    return FALSE;
  }

  output_state =
      gst_video_decoder_set_output_state (GST_VIDEO_DECODER (thiz),
      format, width, height, thiz->input_state);
  if (!output_state)
    return FALSE;

  /* Ensure output_state->caps and info has same width and height
   * Also mandate the 32 bit alignment */
  vinfo = &output_state->info;
  gst_msdk_set_video_alignment (vinfo, &align);
  gst_video_info_align (vinfo, &align);
  output_state->caps = gst_video_info_to_caps (vinfo);
  /* prefer dmabuf memory when the peer supports it */
  if (srcpad_can_dmabuf (thiz))
    gst_caps_set_features (output_state->caps, 0,
        gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_DMABUF, NULL));
  thiz->output_info = output_state->info;

  if (need_allocation) {
    /* Find allocation width and height */
    width =
        GST_ROUND_UP_16 (thiz->param.mfx.FrameInfo.Width ? thiz->param.mfx.
        FrameInfo.Width : GST_VIDEO_INFO_WIDTH (&output_state->info));
    height =
        GST_ROUND_UP_32 (thiz->param.mfx.FrameInfo.Height ? thiz->param.mfx.
        FrameInfo.Height : GST_VIDEO_INFO_HEIGHT (&output_state->info));

    /* set allocation width and height in allocation_caps
     * which may or may not be similar to the output_state caps */
    allocation_caps = gst_caps_copy (output_state->caps);
    format_str =
        gst_video_format_to_string (GST_VIDEO_INFO_FORMAT
        (&thiz->output_info));
    gst_caps_set_simple (allocation_caps, "width", G_TYPE_INT, width,
        "height", G_TYPE_INT, height, "format", G_TYPE_STRING, format_str,
        NULL);
    GST_INFO_OBJECT (thiz, "new alloc caps = %" GST_PTR_FORMAT,
        allocation_caps);
    /* cache for later non-allocation renegotiations */
    gst_caps_replace (&thiz->allocation_caps, allocation_caps);
  } else {
    /* We keep the allocation parameters as it is to avoid pool renegotiation.
     * For codecs like VP9, dynamic resolution change doesn't requires allocation
     * reset if the new video frame resolution is lower than the
     * already configured one */
    /* NOTE(review): assumes thiz->allocation_caps is non-NULL here, i.e.
     * a need_allocation pass happened earlier — confirm against callers */
    allocation_caps = gst_caps_copy (thiz->allocation_caps);
  }
  gst_caps_replace (&output_state->allocation_caps, allocation_caps);
  if (allocation_caps)
    gst_caps_unref (allocation_caps);

  gst_video_codec_state_unref (output_state);
  return TRUE;
}
/* One-time initialization of the MSDK decode session: configure the
 * IO pattern and frame parameters, validate them with the Media SDK
 * (Query), size the surface pool (QueryIOSurf plus downstream and
 * shared-context requirements), allocate video memory if used, and
 * initialize the decoder.  Runs under the object lock; returns TRUE
 * on success (or if already initialized). */
static gboolean
gst_msdkdec_init_decoder (GstMsdkDec * thiz)
{
  GstVideoInfo *info;
  mfxSession session;
  mfxStatus status;
  mfxFrameAllocRequest request;

  if (thiz->initialized)
    return TRUE;

  if (!thiz->context) {
    GST_WARNING_OBJECT (thiz, "No MSDK Context");
    return FALSE;
  }

  if (!thiz->input_state) {
    GST_DEBUG_OBJECT (thiz, "Have no input state yet");
    return FALSE;
  }
  info = &thiz->input_state->info;

  GST_OBJECT_LOCK (thiz);

  if (thiz->use_video_memory) {
    gst_msdk_set_frame_allocator (thiz->context);
    thiz->param.IOPattern = MFX_IOPATTERN_OUT_VIDEO_MEMORY;
  } else {
    thiz->param.IOPattern = MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
  }
  GST_INFO_OBJECT (thiz, "This MSDK decoder uses %s memory",
      thiz->use_video_memory ? "video" : "system");

  thiz->param.AsyncDepth = thiz->async_depth;

  /* We expect msdk to fill the width and height values */
  /* NOTE(review): failing this macro returns while still holding the
   * object lock taken above — confirm this is a can't-happen invariant */
  g_return_val_if_fail (thiz->param.mfx.FrameInfo.Width
      && thiz->param.mfx.FrameInfo.Height, FALSE);

  /* Force 32 bit rounding to avoid messing up of memory alignment when
   * dealing with different allocators */
  /* Fixme: msdk sometimes only requires 16 bit rounding, optimization possible */
  thiz->param.mfx.FrameInfo.Width =
      GST_ROUND_UP_16 (thiz->param.mfx.FrameInfo.Width);
  thiz->param.mfx.FrameInfo.Height =
      GST_ROUND_UP_32 (thiz->param.mfx.FrameInfo.Height);
  /* Set framerate only if provided.
   * If not, framerate will be assumed inside the driver.
   * Also we respect the upstream provided fps values */
  /* NOTE(review): the && means the override is skipped when either fps
   * term already matches — possibly intended to be || ; confirm */
  if (info->fps_n > 0 && info->fps_d > 0
      && info->fps_n != thiz->param.mfx.FrameInfo.FrameRateExtN
      && info->fps_d != thiz->param.mfx.FrameInfo.FrameRateExtD) {
    thiz->param.mfx.FrameInfo.FrameRateExtN = info->fps_n;
    thiz->param.mfx.FrameInfo.FrameRateExtD = info->fps_d;
  }

  /* take pixel-aspect-ratio from upstream only when the bitstream
   * didn't provide one */
  if (info->par_n && info->par_d && !thiz->param.mfx.FrameInfo.AspectRatioW
      && !thiz->param.mfx.FrameInfo.AspectRatioH) {
    thiz->param.mfx.FrameInfo.AspectRatioW = info->par_n;
    thiz->param.mfx.FrameInfo.AspectRatioH = info->par_d;
  }

  /* default any still-unset fields */
  thiz->param.mfx.FrameInfo.PicStruct =
      thiz->param.mfx.FrameInfo.PicStruct ? thiz->param.mfx.
      FrameInfo.PicStruct : MFX_PICSTRUCT_PROGRESSIVE;
  thiz->param.mfx.FrameInfo.FourCC =
      thiz->param.mfx.FrameInfo.FourCC ? thiz->param.mfx.
      FrameInfo.FourCC : MFX_FOURCC_NV12;
  thiz->param.mfx.FrameInfo.ChromaFormat =
      thiz->param.mfx.FrameInfo.ChromaFormat ? thiz->param.mfx.
      FrameInfo.ChromaFormat : MFX_CHROMAFORMAT_YUV420;

  session = gst_msdk_context_get_session (thiz->context);
  /* validate parameters and allow the Media SDK to make adjustments */
  status = MFXVideoDECODE_Query (session, &thiz->param, &thiz->param);
  if (status < MFX_ERR_NONE) {
    GST_ERROR_OBJECT (thiz, "Video Decode Query failed (%s)",
        msdk_status_to_string (status));
    goto failed;
  } else if (status > MFX_ERR_NONE) {
    GST_WARNING_OBJECT (thiz, "Video Decode Query returned: %s",
        msdk_status_to_string (status));
  }

  /* Force the structure to MFX_PICSTRUCT_PROGRESSIVE if it is unknown to
   * work-around MSDK issue:
   * https://github.com/Intel-Media-SDK/MediaSDK/issues/1139
   */
  if (thiz->param.mfx.FrameInfo.PicStruct == MFX_PICSTRUCT_UNKNOWN)
    thiz->param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;

  status = MFXVideoDECODE_QueryIOSurf (session, &thiz->param, &request);
  if (status < MFX_ERR_NONE) {
    GST_ERROR_OBJECT (thiz, "Query IO surfaces failed (%s)",
        msdk_status_to_string (status));
    goto failed;
  } else if (status > MFX_ERR_NONE) {
    GST_WARNING_OBJECT (thiz, "Query IO surfaces returned: %s",
        msdk_status_to_string (status));
  }

  if (request.NumFrameSuggested < thiz->param.AsyncDepth) {
    GST_ERROR_OBJECT (thiz, "Required %d surfaces (%d suggested), async %d",
        request.NumFrameMin, request.NumFrameSuggested,
        thiz->param.AsyncDepth);
    goto failed;
  }

  /* account the downstream requirement */
  if (G_LIKELY (thiz->min_prealloc_buffers))
    request.NumFrameSuggested += thiz->min_prealloc_buffers;
  else
    GST_WARNING_OBJECT (thiz,
        "Allocating resources without considering the downstream requirement"
        "or extra scratch surface count");

  if (thiz->use_video_memory) {
    gint shared_async_depth;

    /* other elements sharing this context may also hold surfaces in
     * flight; reserve extra for them */
    shared_async_depth =
        gst_msdk_context_get_shared_async_depth (thiz->context);
    request.NumFrameSuggested += shared_async_depth;

    request.Type |= MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
    if (thiz->use_dmabuf)
      request.Type |= MFX_MEMTYPE_EXPORT_FRAME;
    gst_msdk_frame_alloc (thiz->context, &request, &thiz->alloc_resp);
  }

  /* update the prealloc_buffer count which will be used later
   * as GstBufferPool min_buffers */
  thiz->min_prealloc_buffers = request.NumFrameSuggested;

  GST_DEBUG_OBJECT (thiz, "Required %d surfaces (%d suggested)",
      request.NumFrameMin, request.NumFrameSuggested);

  status = MFXVideoDECODE_Init (session, &thiz->param);
  if (status < MFX_ERR_NONE) {
    GST_ERROR_OBJECT (thiz, "Init failed (%s)",
        msdk_status_to_string (status));
    goto failed;
  } else if (status > MFX_ERR_NONE) {
    GST_WARNING_OBJECT (thiz, "Init returned: %s",
        msdk_status_to_string (status));
  }

  /* read back the parameters the SDK actually settled on */
  status = MFXVideoDECODE_GetVideoParam (session, &thiz->param);
  if (status < MFX_ERR_NONE) {
    GST_ERROR_OBJECT (thiz, "Get Video Parameters failed (%s)",
        msdk_status_to_string (status));
    goto failed;
  } else if (status > MFX_ERR_NONE) {
    GST_WARNING_OBJECT (thiz, "Get Video Parameters returned: %s",
        msdk_status_to_string (status));
  }

  g_array_set_size (thiz->tasks, 0);    /* resets array content */
  g_array_set_size (thiz->tasks, thiz->param.AsyncDepth);
  thiz->next_task = 0;

  GST_OBJECT_UNLOCK (thiz);

  thiz->initialized = TRUE;
  return TRUE;

failed:
  GST_OBJECT_UNLOCK (thiz);
  return FALSE;
}