/* Configures the base encoder context for JPEG: no reference frames
 * (intra-only codec) and a coded-buffer size large enough for an
 * uncompressed 4:2:0 frame plus all fixed headers. */
static GstVaapiEncoderStatus
set_context_info (GstVaapiEncoder * base_encoder)
{
  GstVaapiEncoderJpeg *const jpeg_encoder =
      GST_VAAPI_ENCODER_JPEG_CAST (base_encoder);
  GstVideoInfo *const video_info =
      GST_VAAPI_ENCODER_VIDEO_INFO (jpeg_encoder);

  /* Worst-case sizes of the fixed JPEG headers, in bytes */
  enum
  {
    MAX_APP_HDR_SIZE = 20,
    MAX_FRAME_HDR_SIZE = 19,
    MAX_QUANT_TABLE_SIZE = 138,
    MAX_HUFFMAN_TABLE_SIZE = 432,
    MAX_SCAN_HDR_SIZE = 14
  };
  const guint max_headers_size = MAX_APP_HDR_SIZE + MAX_FRAME_HDR_SIZE +
      MAX_QUANT_TABLE_SIZE + MAX_HUFFMAN_TABLE_SIZE + MAX_SCAN_HDR_SIZE;

  if (!ensure_hw_profile (jpeg_encoder))
    return GST_VAAPI_ENCODER_STATUS_ERROR_UNSUPPORTED_PROFILE;

  /* JPEG is intra-only: no reference frames are ever used */
  base_encoder->num_ref_frames = 0;

  /* Only YUV 4:2:0 formats are supported for now: 1.5 bytes per pixel
   * over 16-aligned dimensions, plus room for the headers above. */
  base_encoder->codedbuf_size =
      GST_ROUND_UP_16 (video_info->width) *
      GST_ROUND_UP_16 (video_info->height) * 3 / 2 + max_headers_size;

  return GST_VAAPI_ENCODER_STATUS_SUCCESS;
}
/* Fills @filter's mfxFrameInfo from a #GstVideoInfo: fourcc, interlacing
 * mode, crop rectangle, frame rate, aspect ratio, bit depth, and the
 * aligned surface dimensions (16-aligned width; height 16-aligned for
 * progressive content, 32-aligned for interlaced as MSDK requires).
 *
 * Fix: the original defaulted FrameRateExtN to 30 when @info carried no
 * frame rate (fps_n == 0) but copied FrameRateExtD unguarded, so caps
 * with a 0/0 framerate produced the nonsensical rate 30/0.  The
 * denominator now falls back to 1 in that case. */
void
gst_mfx_filter_set_frame_info_from_gst_video_info (GstMfxFilter * filter,
    const GstVideoInfo * info)
{
  g_return_if_fail (filter != NULL);

  filter->frame_info.ChromaFormat = MFX_CHROMAFORMAT_YUV420;
  filter->frame_info.FourCC =
      gst_video_format_to_mfx_fourcc (GST_VIDEO_INFO_FORMAT (info));
  filter->frame_info.PicStruct =
      GST_VIDEO_INFO_IS_INTERLACED (info) ?
      (GST_VIDEO_INFO_FLAG_IS_SET (info, GST_VIDEO_FRAME_FLAG_TFF) ?
      MFX_PICSTRUCT_FIELD_TFF : MFX_PICSTRUCT_FIELD_BFF) :
      MFX_PICSTRUCT_PROGRESSIVE;

  filter->frame_info.CropX = 0;
  filter->frame_info.CropY = 0;
  filter->frame_info.CropW = info->width;
  filter->frame_info.CropH = info->height;
  /* Default to 30/1 when the caps carry no frame rate, so downstream
   * never sees a zero denominator. */
  filter->frame_info.FrameRateExtN = info->fps_n ? info->fps_n : 30;
  filter->frame_info.FrameRateExtD = info->fps_d ? info->fps_d : 1;
  filter->frame_info.AspectRatioW = info->par_n;
  filter->frame_info.AspectRatioH = info->par_d;
  /* Only 8-bit content is handled here */
  filter->frame_info.BitDepthChroma = 8;
  filter->frame_info.BitDepthLuma = 8;

  filter->frame_info.Width = GST_ROUND_UP_16 (info->width);
  filter->frame_info.Height =
      (MFX_PICSTRUCT_PROGRESSIVE == filter->frame_info.PicStruct) ?
      GST_ROUND_UP_16 (info->height) : GST_ROUND_UP_32 (info->height);
}
/* Fills a th_ycbcr_buffer for the encoder from @frame.  The logical
 * sizes are the frame's dimensions rounded up to multiples of 16,
 * while the data pointers and strides come straight from the frame.
 *
 * According to Theora developer Timothy Terriberry, the encoder never
 * reads memory outside of pic_width/height even when the frame size is
 * larger; pixels outside that region are encoded to default values, so
 * handing the frame's own planes with padded logical sizes is safe. */
static void
theora_enc_init_buffer (th_ycbcr_buffer buf, GstVideoFrame * frame)
{
  GstVideoInfo padded_info;
  guint plane;

  gst_video_info_init (&padded_info);
  gst_video_info_set_format (&padded_info, GST_VIDEO_FRAME_FORMAT (frame),
      GST_ROUND_UP_16 (GST_VIDEO_FRAME_WIDTH (frame)),
      GST_ROUND_UP_16 (GST_VIDEO_FRAME_HEIGHT (frame)));

  for (plane = 0; plane < 3; plane++) {
    buf[plane].width = GST_VIDEO_INFO_COMP_WIDTH (&padded_info, plane);
    buf[plane].height = GST_VIDEO_INFO_COMP_HEIGHT (&padded_info, plane);
    buf[plane].data = GST_VIDEO_FRAME_COMP_DATA (frame, plane);
    buf[plane].stride = GST_VIDEO_FRAME_COMP_STRIDE (frame, plane);
  }
}
/* GstVideoEncoder::set_format implementation: (re)initializes th_info
 * from the negotiated input state, stores the input state on @benc and
 * resets the encoder.  Always returns TRUE (the sink template restricts
 * the formats, hence the g_assert_not_reached() default). */
static gboolean
theora_enc_set_format (GstVideoEncoder * benc, GstVideoCodecState * state)
{
  GstTheoraEnc *enc = GST_THEORA_ENC (benc);
  GstVideoInfo *vinfo = &state->info;

  enc->width = GST_VIDEO_INFO_WIDTH (vinfo);
  enc->height = GST_VIDEO_INFO_HEIGHT (vinfo);

  th_info_clear (&enc->info);
  th_info_init (&enc->info);

  /* Theora requires frame dimensions divisible by sixteen; the visible
   * picture area is signalled separately via pic_width/pic_height. */
  enc->info.frame_width = GST_ROUND_UP_16 (enc->width);
  enc->info.frame_height = GST_ROUND_UP_16 (enc->height);
  enc->info.pic_width = enc->width;
  enc->info.pic_height = enc->height;

  switch (GST_VIDEO_INFO_FORMAT (vinfo)) {
    case GST_VIDEO_FORMAT_I420:
      enc->info.pixel_fmt = TH_PF_420;
      break;
    case GST_VIDEO_FORMAT_Y42B:
      enc->info.pixel_fmt = TH_PF_422;
      break;
    case GST_VIDEO_FORMAT_Y444:
      enc->info.pixel_fmt = TH_PF_444;
      break;
    default:
      g_assert_not_reached ();
  }

  enc->fps_n = GST_VIDEO_INFO_FPS_N (vinfo);
  enc->fps_d = GST_VIDEO_INFO_FPS_D (vinfo);
  enc->info.fps_numerator = enc->fps_n;
  enc->info.fps_denominator = enc->fps_d;
  enc->info.aspect_numerator = GST_VIDEO_INFO_PAR_N (vinfo);
  enc->info.aspect_denominator = GST_VIDEO_INFO_PAR_D (vinfo);
  enc->info.colorspace = TH_CS_UNSPECIFIED;

  /* Save input state */
  if (enc->input_state)
    gst_video_codec_state_unref (enc->input_state);
  enc->input_state = gst_video_codec_state_ref (state);

  /* as done in theora */
  enc->info.keyframe_granule_shift = _ilog (enc->keyframe_force - 1);
  GST_DEBUG_OBJECT (enc, "keyframe_frequency_force is %d, granule shift is %d",
      enc->keyframe_force, enc->info.keyframe_granule_shift);

  theora_enc_reset (enc);
  enc->initialised = TRUE;

  return TRUE;
}
/* Lays out a planar Y41B (4:1:1) test-pattern buffer inside @dest:
 * a full-resolution luma plane followed by two quarter-width chroma
 * planes, and records the strides and the end pointer in @p. */
static void
paint_setup_Y41B (paintinfo * p, unsigned char *dest)
{
  p->ystride = GST_ROUND_UP_4 (p->width);
  p->ustride = GST_ROUND_UP_16 (p->width) / 4;
  p->vstride = GST_ROUND_UP_16 (p->width) / 4;

  p->yp = dest;
  p->up = p->yp + p->ystride * p->height;
  p->vp = p->up + p->ustride * p->height;
  p->endptr = p->vp + p->vstride * p->height;
}
/* Initializes @filter's mfxVideoParam VPP In/Out frame descriptions from
 * the stored frame_info and the user-requested output fourcc/size/rate,
 * then calls configure_filters().  Ordering matters: In is sanitized
 * first, Out starts as a copy of frame_info and is then overridden. */
static void
init_params (GstMfxFilter * filter)
{
  gdouble frame_rate;

  filter->params.vpp.In = filter->frame_info;
  /* Aligned frame dimensions may differ between input and output surfaces
   * so we sanitize the input frame dimensions, since output frame dimensions
   * could have certain alignment requirements used in HEVC HW encoding */
  if (filter->shared_request[1]) {
    filter->params.vpp.In.Width = GST_ROUND_UP_16 (filter->frame_info.CropW);
    /* Interlaced surfaces need 32-aligned height, progressive only 16 */
    filter->params.vpp.In.Height =
        (MFX_PICSTRUCT_PROGRESSIVE == filter->frame_info.PicStruct) ?
        GST_ROUND_UP_16 (filter->frame_info.CropH) :
        GST_ROUND_UP_32 (filter->frame_info.CropH);
  }
  filter->params.vpp.Out = filter->frame_info;

  /* Apply user-requested output overrides (format, width, height) */
  if (filter->fourcc)
    filter->params.vpp.Out.FourCC = filter->fourcc;
  if (filter->width) {
    filter->params.vpp.Out.CropW = filter->width;
    filter->params.vpp.Out.Width = GST_ROUND_UP_16 (filter->width);
  }
  if (filter->height) {
    filter->params.vpp.Out.CropH = filter->height;
    filter->params.vpp.Out.Height =
        (MFX_PICSTRUCT_PROGRESSIVE == filter->frame_info.PicStruct) ?
        GST_ROUND_UP_16 (filter->height) : GST_ROUND_UP_32 (filter->height);
  }
  if (filter->filter_op & GST_MFX_FILTER_DEINTERLACING) {
    /* Setup special double frame rate deinterlace mode */
    gst_util_fraction_to_double (filter->params.vpp.In.FrameRateExtN,
        filter->params.vpp.In.FrameRateExtD, &frame_rate);
    /* Halve a ~60 fps field rate so VPP doubles it back on output;
     * (int)(x + 0.5) rounds e.g. 59.94 up to 60 */
    if ((filter->frame_info.PicStruct == MFX_PICSTRUCT_FIELD_TFF ||
            filter->frame_info.PicStruct == MFX_PICSTRUCT_FIELD_BFF) &&
        (int) (frame_rate + 0.5) == 60)
      filter->params.vpp.In.FrameRateExtN /= 2;
    filter->params.vpp.Out.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;
  }
  if (filter->filter_op & GST_MFX_FILTER_FRAMERATE_CONVERSION &&
      (filter->fps_n && filter->fps_d)) {
    filter->params.vpp.Out.FrameRateExtN = filter->fps_n;
    filter->params.vpp.Out.FrameRateExtD = filter->fps_d;
  }

  configure_filters (filter);
}
/**
 * gst_buffer_list_insert:
 * @list: a #GstBufferList
 * @idx: the index
 * @buffer: (transfer full): a #GstBuffer
 *
 * Insert @buffer at @idx in @list. Other buffers are moved to make room for
 * this new buffer.
 *
 * A -1 value for @idx will append the buffer at the end.
 */
void
gst_buffer_list_insert (GstBufferList * list, gint idx, GstBuffer * buffer)
{
  guint want_alloc;

  g_return_if_fail (GST_IS_BUFFER_LIST (list));
  g_return_if_fail (buffer != NULL);
  g_return_if_fail (gst_buffer_list_is_writable (list));

  /* Fast path: appending with spare capacity needs no move or grow */
  if (idx == -1 && list->n_buffers < list->n_allocated) {
    list->buffers[list->n_buffers++] = buffer;
    return;
  }

  /* Out-of-range indices are clamped to "append at the end".
   * NOTE(review): @idx is signed while n_buffers is presumably unsigned;
   * negative values other than -1 would promote to a huge unsigned value
   * here and also end up appending — confirm that is intended. */
  if (idx == -1 || idx > list->n_buffers)
    idx = list->n_buffers;

  want_alloc = list->n_buffers + 1;

  if (want_alloc > list->n_allocated) {
    /* Grow geometrically (and at least to a multiple of 16) so repeated
     * inserts amortize to O(1) reallocations */
    want_alloc = MAX (GST_ROUND_UP_16 (want_alloc), list->n_allocated * 2);

    if (GST_BUFFER_LIST_IS_USING_DYNAMIC_ARRAY (list)) {
      list->buffers = g_renew (GstBuffer *, list->buffers, want_alloc);
    } else {
      /* continues past the end of this chunk: first growth out of the
       * embedded inline array */
/* Configures the base encoder context for MPEG-2: two reference frames
 * and a coded-buffer size derived from the per-macroblock bit limit for
 * 4:2:0 chroma plus worst-case header sizes.  Fails only when no HW
 * profile can be ensured. */
static GstVaapiEncoderStatus
set_context_info (GstVaapiEncoder * base_encoder)
{
  GstVaapiEncoderMpeg2 *const encoder =
      GST_VAAPI_ENCODER_MPEG2_CAST (base_encoder);
  GstVideoInfo *const vip = GST_VAAPI_ENCODER_VIDEO_INFO (encoder);

  /* Maximum sizes for common headers (in bytes) */
  enum
  {
    MAX_SEQ_HDR_SIZE = 140,
    MAX_SEQ_EXT_SIZE = 10,
    MAX_GOP_SIZE = 8,
    MAX_PIC_HDR_SIZE = 10,
    MAX_PIC_EXT_SIZE = 11,
    MAX_SLICE_HDR_SIZE = 8,
  };

  if (!ensure_hw_profile (encoder))
    return GST_VAAPI_ENCODER_STATUS_ERROR_UNSUPPORTED_PROFILE;

  base_encoder->num_ref_frames = 2;

  /* Only YUV 4:2:0 formats are supported for now. This means that
   * we have a limit of 4608 bits per macroblock. */
  /* 16x16-aligned pixels / 256 = macroblock count; 576 bytes = 4608 bits */
  base_encoder->codedbuf_size = (GST_ROUND_UP_16 (vip->width) *
      GST_ROUND_UP_16 (vip->height) / 256) * 576;

  /* Account for Sequence, GOP, and Picture headers */
  /* XXX: exclude unused Sequence Display Extension, Sequence Scalable
   * Extension, Quantization Matrix Extension, Picture Display Extension,
   * Picture Temporal Scalable Extension, Picture Spatial Scalable
   * Extension */
  base_encoder->codedbuf_size += MAX_SEQ_HDR_SIZE + MAX_SEQ_EXT_SIZE +
      MAX_GOP_SIZE + MAX_PIC_HDR_SIZE + MAX_PIC_EXT_SIZE;

  /* Account for Slice headers. We use one slice per line of macroblock */
  base_encoder->codedbuf_size += (GST_ROUND_UP_16 (vip->height) / 16) *
      MAX_SLICE_HDR_SIZE;

  return GST_VAAPI_ENCODER_STATUS_SUCCESS;
}
/* Computes the H.264 decoded-picture-buffer depth (in frames) needed for
 * frame reordering, from the profile/level found in @codec_data.
 * On success returns TRUE with *length set; baseline profile leaves
 * *length at 0 (no reordering needed).  Returns FALSE on unparsable
 * codec_data, zero dimensions, or an unknown level. */
static gboolean
compute_h264_decode_picture_buffer_length (GstVtdec * vtdec,
    GstBuffer * codec_data, int *length)
{
  int profile, level;
  int dpb_mb_size = 16;
  int max_dpb_size_frames = 16;
  int max_dpb_mb_s = -1;
  /* Frame size in whole macroblocks (16x16), computed up front; the
   * zero-dimension check below runs before these are ever divided by */
  int width_in_mb_s = GST_ROUND_UP_16 (vtdec->video_info.width) / dpb_mb_size;
  int height_in_mb_s =
      GST_ROUND_UP_16 (vtdec->video_info.height) / dpb_mb_size;

  *length = 0;

  if (!parse_h264_profile_and_level_from_codec_data (vtdec, codec_data,
          &profile, &level))
    return FALSE;

  if (vtdec->video_info.width == 0 || vtdec->video_info.height == 0)
    return FALSE;

  GST_INFO_OBJECT (vtdec, "parsed profile %d, level %d", profile, level);
  if (profile == 66) {
    /* baseline or constrained-baseline, we don't need to reorder */
    return TRUE;
  }

  max_dpb_mb_s = get_dpb_max_mb_s_from_level (vtdec, level);
  if (max_dpb_mb_s == -1) {
    GST_ELEMENT_ERROR (vtdec, STREAM, DECODE, (NULL),
        ("invalid level in codec_data, could not compute max_dpb_mb_s"));
    return FALSE;
  }

  /* this formula is specified in sections A.3.1.h and A.3.2.f of the 2009
   * edition of the standard */
  *length = MIN (floor (max_dpb_mb_s / (width_in_mb_s * height_in_mb_s)),
      max_dpb_size_frames);

  return TRUE;
}
/* Configures the base encoder context for VP8: three reference frames
 * and a coded-buffer size assuming 4:2:0 input with a 4x compression
 * ratio, plus worst-case frame-header sizes. */
static GstVaapiEncoderStatus
set_context_info (GstVaapiEncoder * base_encoder)
{
  GstVaapiEncoderVP8 *const vp8_encoder =
      GST_VAAPI_ENCODER_VP8_CAST (base_encoder);
  GstVideoInfo *const video_info =
      GST_VAAPI_ENCODER_VIDEO_INFO (vp8_encoder);

  /* Worst-case sizes of the VP8 frame header elements, in bytes */
  enum
  {
    MAX_FRAME_TAG_SIZE = 10,
    MAX_UPDATE_SEGMENTATION_SIZE = 13,
    MAX_MB_LF_ADJUSTMENTS_SIZE = 9,
    MAX_QUANT_INDICES_SIZE = 5,
    MAX_TOKEN_PROB_UPDATE_SIZE = 1188,
    MAX_MV_PROBE_UPDATE_SIZE = 38,
    MAX_REST_OF_FRAME_HDR_SIZE = 15
  };
  const guint max_frame_hdr_size = MAX_FRAME_TAG_SIZE +
      MAX_UPDATE_SEGMENTATION_SIZE + MAX_MB_LF_ADJUSTMENTS_SIZE +
      MAX_QUANT_INDICES_SIZE + MAX_TOKEN_PROB_UPDATE_SIZE +
      MAX_MV_PROBE_UPDATE_SIZE + MAX_REST_OF_FRAME_HDR_SIZE;

  if (!ensure_hw_profile (vp8_encoder))
    return GST_VAAPI_ENCODER_STATUS_ERROR_UNSUPPORTED_PROFILE;

  /* VP8 can hold up to 3 reference frames */
  base_encoder->num_ref_frames = 3;

  /* Only YUV 4:2:0 formats are supported for now: 12 bits per pixel,
   * assuming a 4x compression ratio, plus the headers above */
  base_encoder->codedbuf_size =
      GST_ROUND_UP_16 (video_info->width) *
      GST_ROUND_UP_16 (video_info->height) * 12 / 4 + max_frame_hdr_size;

  return GST_VAAPI_ENCODER_STATUS_SUCCESS;
}
/**
 * gst_buffer_list_new_sized:
 * @size: an initial reserved size
 *
 * Creates a new, empty #GstBufferList. The caller is responsible for unreffing
 * the returned #GstBufferList. The list will have @size space preallocated so
 * that memory reallocations can be avoided.
 *
 * Free-function: gst_buffer_list_unref
 *
 * Returns: (transfer full): the new #GstBufferList. gst_buffer_list_unref()
 * after usage.
 */
GstBufferList *
gst_buffer_list_new_sized (guint size)
{
  GstBufferList *list;
  gsize slice_size;
  guint n_allocated;

  /* Fix: GST_ROUND_UP_16 (0) is 0 and "(n_allocated - 1)" below would
   * then underflow to G_MAXUINT, requesting an absurd allocation, so
   * make sure at least one slot is reserved. */
  if (size == 0)
    size = 1;

  n_allocated = GST_ROUND_UP_16 (size);

  /* The struct embeds one pointer slot, hence the (n_allocated - 1) */
  slice_size = sizeof (GstBufferList) + (n_allocated - 1) * sizeof (gpointer);

  list = g_slice_alloc0 (slice_size);

  GST_LOG ("new %p", list);

  gst_buffer_list_init (list, n_allocated, slice_size);

  return list;
}
/* Computes per-plane stride, offset and total buffer size for @info from
 * its format, width and height.  Strides follow each format's
 * conventional 4/8-byte (or tile) alignment; for vertically subsampled
 * formats the chroma height is derived from a 2-rounded luma height, and
 * interlaced variants round the chroma height up again so each field
 * covers whole chroma rows.  Always returns 0. */
static int
fill_planes (GstVideoInfo * info)
{
  gsize width, height, cr_h;

  width = (gsize) info->width;
  height = (gsize) info->height;

  switch (info->finfo->format) {
      /* packed 4:2:2, 2 bytes/pixel, single plane */
    case GST_VIDEO_FORMAT_YUY2:
    case GST_VIDEO_FORMAT_YVYU:
    case GST_VIDEO_FORMAT_UYVY:
      info->stride[0] = GST_ROUND_UP_4 (width * 2);
      info->offset[0] = 0;
      info->size = info->stride[0] * height;
      break;
      /* packed 32-bit formats, 4 bytes/pixel */
    case GST_VIDEO_FORMAT_AYUV:
    case GST_VIDEO_FORMAT_RGBx:
    case GST_VIDEO_FORMAT_RGBA:
    case GST_VIDEO_FORMAT_BGRx:
    case GST_VIDEO_FORMAT_BGRA:
    case GST_VIDEO_FORMAT_xRGB:
    case GST_VIDEO_FORMAT_ARGB:
    case GST_VIDEO_FORMAT_xBGR:
    case GST_VIDEO_FORMAT_ABGR:
    case GST_VIDEO_FORMAT_r210:
      info->stride[0] = width * 4;
      info->offset[0] = 0;
      info->size = info->stride[0] * height;
      break;
      /* packed 15/16-bit RGB, 2 bytes/pixel */
    case GST_VIDEO_FORMAT_RGB16:
    case GST_VIDEO_FORMAT_BGR16:
    case GST_VIDEO_FORMAT_RGB15:
    case GST_VIDEO_FORMAT_BGR15:
      info->stride[0] = GST_ROUND_UP_4 (width * 2);
      info->offset[0] = 0;
      info->size = info->stride[0] * height;
      break;
      /* packed 24-bit formats, 3 bytes/pixel */
    case GST_VIDEO_FORMAT_RGB:
    case GST_VIDEO_FORMAT_BGR:
    case GST_VIDEO_FORMAT_v308:
      info->stride[0] = GST_ROUND_UP_4 (width * 3);
      info->offset[0] = 0;
      info->size = info->stride[0] * height;
      break;
      /* v210: 6 pixels packed in 16 bytes, stride in 128-byte groups */
    case GST_VIDEO_FORMAT_v210:
      info->stride[0] = ((width + 47) / 48) * 128;
      info->offset[0] = 0;
      info->size = info->stride[0] * height;
      break;
    case GST_VIDEO_FORMAT_v216:
      info->stride[0] = GST_ROUND_UP_8 (width * 4);
      info->offset[0] = 0;
      info->size = info->stride[0] * height;
      break;
    case GST_VIDEO_FORMAT_GRAY8:
      info->stride[0] = GST_ROUND_UP_4 (width);
      info->offset[0] = 0;
      info->size = info->stride[0] * height;
      break;
    case GST_VIDEO_FORMAT_GRAY16_BE:
    case GST_VIDEO_FORMAT_GRAY16_LE:
      info->stride[0] = GST_ROUND_UP_4 (width * 2);
      info->offset[0] = 0;
      info->size = info->stride[0] * height;
      break;
      /* UYVP: 10-bit packed 4:2:2, 5 bytes per 2 pixels */
    case GST_VIDEO_FORMAT_UYVP:
      info->stride[0] = GST_ROUND_UP_4 ((width * 2 * 5 + 3) / 4);
      info->offset[0] = 0;
      info->size = info->stride[0] * height;
      break;
      /* paletted 8-bit: pixel plane + 256-entry 4-byte palette "plane" */
    case GST_VIDEO_FORMAT_RGB8P:
      info->stride[0] = GST_ROUND_UP_4 (width);
      info->stride[1] = 4;
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * height;
      info->size = info->offset[1] + (4 * 256);
      break;
      /* IYU1: packed 4:1:1, 12 bits/pixel */
    case GST_VIDEO_FORMAT_IYU1:
      info->stride[0] = GST_ROUND_UP_4 (GST_ROUND_UP_4 (width) +
          GST_ROUND_UP_4 (width) / 2);
      info->offset[0] = 0;
      info->size = info->stride[0] * height;
      break;
      /* packed 64-bit formats, 8 bytes/pixel */
    case GST_VIDEO_FORMAT_ARGB64:
    case GST_VIDEO_FORMAT_AYUV64:
      info->stride[0] = width * 8;
      info->offset[0] = 0;
      info->size = info->stride[0] * height;
      break;
    case GST_VIDEO_FORMAT_I420:
    case GST_VIDEO_FORMAT_YV12:        /* same as I420, but plane 1+2 swapped */
      info->stride[0] = GST_ROUND_UP_4 (width);
      info->stride[1] = GST_ROUND_UP_4 (GST_ROUND_UP_2 (width) / 2);
      info->stride[2] = info->stride[1];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * GST_ROUND_UP_2 (height);
      cr_h = GST_ROUND_UP_2 (height) / 2;
      if (GST_VIDEO_INFO_IS_INTERLACED (info))
        cr_h = GST_ROUND_UP_2 (cr_h);
      info->offset[2] = info->offset[1] + info->stride[1] * cr_h;
      info->size = info->offset[2] + info->stride[2] * cr_h;
      break;
      /* planar 4:1:1 */
    case GST_VIDEO_FORMAT_Y41B:
      info->stride[0] = GST_ROUND_UP_4 (width);
      info->stride[1] = GST_ROUND_UP_16 (width) / 4;
      info->stride[2] = info->stride[1];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * height;
      info->offset[2] = info->offset[1] + info->stride[1] * height;
      /* simplification of ROUNDUP4(w)*h + 2*((ROUNDUP16(w)/4)*h) */
      info->size = (info->stride[0] + (GST_ROUND_UP_16 (width) / 2)) * height;
      break;
      /* planar 4:2:2 */
    case GST_VIDEO_FORMAT_Y42B:
      info->stride[0] = GST_ROUND_UP_4 (width);
      info->stride[1] = GST_ROUND_UP_8 (width) / 2;
      info->stride[2] = info->stride[1];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * height;
      info->offset[2] = info->offset[1] + info->stride[1] * height;
      /* simplification of ROUNDUP4(w)*h + 2*(ROUNDUP8(w)/2)*h */
      info->size = (info->stride[0] + GST_ROUND_UP_8 (width)) * height;
      break;
      /* planar 4:4:4, three equally sized planes */
    case GST_VIDEO_FORMAT_Y444:
    case GST_VIDEO_FORMAT_GBR:
      info->stride[0] = GST_ROUND_UP_4 (width);
      info->stride[1] = info->stride[0];
      info->stride[2] = info->stride[0];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * height;
      info->offset[2] = info->offset[1] * 2;
      info->size = info->stride[0] * height * 3;
      break;
      /* semi-planar 4:2:0: luma plane + interleaved chroma plane */
    case GST_VIDEO_FORMAT_NV12:
    case GST_VIDEO_FORMAT_NV21:
      info->stride[0] = GST_ROUND_UP_4 (width);
      info->stride[1] = info->stride[0];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * GST_ROUND_UP_2 (height);
      cr_h = GST_ROUND_UP_2 (height) / 2;
      if (GST_VIDEO_INFO_IS_INTERLACED (info))
        cr_h = GST_ROUND_UP_2 (cr_h);
      info->size = info->offset[1] + info->stride[0] * cr_h;
      break;
      /* semi-planar 4:2:2 */
    case GST_VIDEO_FORMAT_NV16:
    case GST_VIDEO_FORMAT_NV61:
      info->stride[0] = GST_ROUND_UP_4 (width);
      info->stride[1] = info->stride[0];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * height;
      info->size = info->stride[0] * height * 2;
      break;
      /* semi-planar 4:4:4: chroma plane is twice as wide */
    case GST_VIDEO_FORMAT_NV24:
      info->stride[0] = GST_ROUND_UP_4 (width);
      info->stride[1] = GST_ROUND_UP_4 (width * 2);
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * height;
      info->size = info->stride[0] * height + info->stride[1] * height;
      break;
      /* planar 4:2:0 with full-resolution alpha plane */
    case GST_VIDEO_FORMAT_A420:
      info->stride[0] = GST_ROUND_UP_4 (width);
      info->stride[1] = GST_ROUND_UP_4 (GST_ROUND_UP_2 (width) / 2);
      info->stride[2] = info->stride[1];
      info->stride[3] = info->stride[0];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * GST_ROUND_UP_2 (height);
      cr_h = GST_ROUND_UP_2 (height) / 2;
      if (GST_VIDEO_INFO_IS_INTERLACED (info))
        cr_h = GST_ROUND_UP_2 (cr_h);
      info->offset[2] = info->offset[1] + info->stride[1] * cr_h;
      info->offset[3] = info->offset[2] + info->stride[2] * cr_h;
      info->size = info->offset[3] + info->stride[0] * GST_ROUND_UP_2 (height);
      break;
      /* planar 4:1:0 */
    case GST_VIDEO_FORMAT_YUV9:
    case GST_VIDEO_FORMAT_YVU9:
      info->stride[0] = GST_ROUND_UP_4 (width);
      info->stride[1] = GST_ROUND_UP_4 (GST_ROUND_UP_4 (width) / 4);
      info->stride[2] = info->stride[1];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * height;
      cr_h = GST_ROUND_UP_4 (height) / 4;
      if (GST_VIDEO_INFO_IS_INTERLACED (info))
        cr_h = GST_ROUND_UP_2 (cr_h);
      info->offset[2] = info->offset[1] + info->stride[1] * cr_h;
      info->size = info->offset[2] + info->stride[2] * cr_h;
      break;
      /* 10-bit planar 4:2:0, two bytes per sample */
    case GST_VIDEO_FORMAT_I420_10LE:
    case GST_VIDEO_FORMAT_I420_10BE:
      info->stride[0] = GST_ROUND_UP_4 (width * 2);
      info->stride[1] = GST_ROUND_UP_4 (width);
      info->stride[2] = info->stride[1];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * GST_ROUND_UP_2 (height);
      cr_h = GST_ROUND_UP_2 (height) / 2;
      if (GST_VIDEO_INFO_IS_INTERLACED (info))
        cr_h = GST_ROUND_UP_2 (cr_h);
      info->offset[2] = info->offset[1] + info->stride[1] * cr_h;
      info->size = info->offset[2] + info->stride[2] * cr_h;
      break;
      /* 10-bit planar 4:2:2 */
    case GST_VIDEO_FORMAT_I422_10LE:
    case GST_VIDEO_FORMAT_I422_10BE:
      info->stride[0] = GST_ROUND_UP_4 (width * 2);
      info->stride[1] = GST_ROUND_UP_4 (width);
      info->stride[2] = info->stride[1];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * GST_ROUND_UP_2 (height);
      info->offset[2] = info->offset[1] +
          info->stride[1] * GST_ROUND_UP_2 (height);
      info->size = info->offset[2] + info->stride[2] * GST_ROUND_UP_2 (height);
      break;
      /* 10-bit planar 4:4:4 */
    case GST_VIDEO_FORMAT_Y444_10LE:
    case GST_VIDEO_FORMAT_Y444_10BE:
    case GST_VIDEO_FORMAT_GBR_10LE:
    case GST_VIDEO_FORMAT_GBR_10BE:
      info->stride[0] = GST_ROUND_UP_4 (width * 2);
      info->stride[1] = info->stride[0];
      info->stride[2] = info->stride[0];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * height;
      info->offset[2] = info->offset[1] * 2;
      info->size = info->stride[0] * height * 3;
      break;
      /* tiled NV12 in 64x32 macro-tiles; strides encode the tile counts */
    case GST_VIDEO_FORMAT_NV12_64Z32:
      info->stride[0] =
          GST_VIDEO_TILE_MAKE_STRIDE (GST_ROUND_UP_128 (width) / 64,
          GST_ROUND_UP_32 (height) / 32);
      info->stride[1] =
          GST_VIDEO_TILE_MAKE_STRIDE (GST_ROUND_UP_128 (width) / 64,
          GST_ROUND_UP_64 (height) / 64);
      info->offset[0] = 0;
      info->offset[1] = GST_ROUND_UP_128 (width) * GST_ROUND_UP_32 (height);
      info->size = info->offset[1] +
          GST_ROUND_UP_128 (width) * GST_ROUND_UP_64 (height) / 2;
      break;
      /* 10-bit planar 4:2:0 with alpha */
    case GST_VIDEO_FORMAT_A420_10LE:
    case GST_VIDEO_FORMAT_A420_10BE:
      info->stride[0] = GST_ROUND_UP_4 (width * 2);
      info->stride[1] = GST_ROUND_UP_4 (width);
      info->stride[2] = info->stride[1];
      info->stride[3] = info->stride[0];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * GST_ROUND_UP_2 (height);
      cr_h = GST_ROUND_UP_2 (height) / 2;
      if (GST_VIDEO_INFO_IS_INTERLACED (info))
        cr_h = GST_ROUND_UP_2 (cr_h);
      info->offset[2] = info->offset[1] + info->stride[1] * cr_h;
      info->offset[3] = info->offset[2] + info->stride[2] * cr_h;
      info->size = info->offset[3] + info->stride[0] * GST_ROUND_UP_2 (height);
      break;
      /* 10-bit planar 4:2:2 with alpha */
    case GST_VIDEO_FORMAT_A422_10LE:
    case GST_VIDEO_FORMAT_A422_10BE:
      info->stride[0] = GST_ROUND_UP_4 (width * 2);
      info->stride[1] = GST_ROUND_UP_4 (width);
      info->stride[2] = info->stride[1];
      info->stride[3] = info->stride[0];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * GST_ROUND_UP_2 (height);
      info->offset[2] = info->offset[1] +
          info->stride[1] * GST_ROUND_UP_2 (height);
      info->offset[3] = info->offset[2] +
          info->stride[2] * GST_ROUND_UP_2 (height);
      info->size = info->offset[3] + info->stride[0] * GST_ROUND_UP_2 (height);
      break;
      /* 10-bit planar 4:4:4 with alpha, four equal planes */
    case GST_VIDEO_FORMAT_A444_10LE:
    case GST_VIDEO_FORMAT_A444_10BE:
      info->stride[0] = GST_ROUND_UP_4 (width * 2);
      info->stride[1] = info->stride[0];
      info->stride[2] = info->stride[0];
      info->stride[3] = info->stride[0];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * height;
      info->offset[2] = info->offset[1] * 2;
      info->offset[3] = info->offset[1] * 3;
      info->size = info->stride[0] * height * 4;
      break;
      /* no fixed layout for encoded formats */
    case GST_VIDEO_FORMAT_ENCODED:
      break;
    case GST_VIDEO_FORMAT_UNKNOWN:
      GST_ERROR ("invalid format");
      g_warning ("invalid format");
      break;
  }
  return 0;
}
/* Configures the OMX JPEG encoder component: sets JPEG compression on
 * the output port, works around TI component quirks by forcing buffer
 * sizes/dimensions on both ports, and applies the quality (Q-factor).
 *
 * Fixes: every "&param" argument had been corrupted to the literal
 * characters "¶m" (an HTML-entity mangling of "&param"), which does not
 * compile — restored.  Also dropped the unused "gomx" local. */
static void
omx_setup (GstOmxBaseFilter * omx_base)
{
  GstOmxJpegEnc *self;

  self = GST_OMX_JPEGENC (omx_base);

  GST_INFO_OBJECT (omx_base, "begin");

  {
    OMX_PARAM_PORTDEFINITIONTYPE param;

    /* Output port configuration. */
    G_OMX_PORT_GET_DEFINITION (omx_base->out_port, &param);
    param.format.image.eCompressionFormat = OMX_IMAGE_CodingJPEG;
    G_OMX_PORT_SET_DEFINITION (omx_base->out_port, &param);

    /* some workarounds required for TI components. */
    {
      guint32 fourcc;
      gint width, height;

      /* the component should do this instead */
      {
        G_OMX_PORT_GET_DEFINITION (omx_base->in_port, &param);

        width = param.format.image.nFrameWidth;
        height = param.format.image.nFrameHeight;
        fourcc = g_omx_colorformat_to_fourcc (param.format.image.eColorFormat);

        /* this is against the standard; nBufferSize is read-only. */
        param.nBufferSize =
            gst_video_format_get_size (gst_video_format_from_fourcc (fourcc),
            GST_ROUND_UP_16 (width), GST_ROUND_UP_16 (height));

        G_OMX_PORT_SET_DEFINITION (omx_base->in_port, &param);
      }

      /* the component should do this instead */
      {
        G_OMX_PORT_GET_DEFINITION (omx_base->out_port, &param);

        param.nBufferSize = width * height;
        param.format.image.nFrameWidth = width;
        param.format.image.nFrameHeight = height;

        G_OMX_PORT_SET_DEFINITION (omx_base->out_port, &param);
      }
    }
  }

  {
    OMX_IMAGE_PARAM_QFACTORTYPE param;

    /* Apply the user-requested JPEG quality as the Q-factor */
    G_OMX_PORT_GET_PARAM (omx_base->out_port, OMX_IndexParamQFactor, &param);
    param.nQFactor = self->quality;
    G_OMX_PORT_SET_PARAM (omx_base->out_port, OMX_IndexParamQFactor, &param);
  }

  GST_INFO_OBJECT (omx_base, "end");
}
/* Copy/pasted from 0.11 video.c */
/* Computes per-plane stride and offset for @info from its format, width
 * and height (legacy 0.10 blending helper).  Unlike the 0.11 original,
 * most cases here fill only stride/offset — info->size is set only for
 * the 10-bit planar formats at the end.  Always returns 0. */
static int
fill_planes (GstBlendVideoFormatInfo * info)
{
  gint width, height;

  width = info->width;
  height = info->height;

  switch (info->fmt) {
      /* packed 4:2:2, 2 bytes/pixel, single plane */
    case GST_VIDEO_FORMAT_YUY2:
    case GST_VIDEO_FORMAT_YVYU:
    case GST_VIDEO_FORMAT_UYVY:
      info->stride[0] = GST_ROUND_UP_4 (width * 2);
      info->offset[0] = 0;
      break;
      /* packed 32-bit formats, 4 bytes/pixel */
    case GST_VIDEO_FORMAT_AYUV:
    case GST_VIDEO_FORMAT_RGBx:
    case GST_VIDEO_FORMAT_RGBA:
    case GST_VIDEO_FORMAT_BGRx:
    case GST_VIDEO_FORMAT_BGRA:
    case GST_VIDEO_FORMAT_xRGB:
    case GST_VIDEO_FORMAT_ARGB:
    case GST_VIDEO_FORMAT_xBGR:
    case GST_VIDEO_FORMAT_ABGR:
    case GST_VIDEO_FORMAT_r210:
      info->stride[0] = width * 4;
      info->offset[0] = 0;
      break;
      /* packed 15/16-bit RGB */
    case GST_VIDEO_FORMAT_RGB16:
    case GST_VIDEO_FORMAT_BGR16:
    case GST_VIDEO_FORMAT_RGB15:
    case GST_VIDEO_FORMAT_BGR15:
      info->stride[0] = GST_ROUND_UP_4 (width * 2);
      info->offset[0] = 0;
      break;
      /* packed 24-bit formats */
    case GST_VIDEO_FORMAT_RGB:
    case GST_VIDEO_FORMAT_BGR:
    case GST_VIDEO_FORMAT_v308:
      info->stride[0] = GST_ROUND_UP_4 (width * 3);
      info->offset[0] = 0;
      break;
      /* v210: 6 pixels packed in 16 bytes, 128-byte stride groups */
    case GST_VIDEO_FORMAT_v210:
      info->stride[0] = ((width + 47) / 48) * 128;
      info->offset[0] = 0;
      break;
    case GST_VIDEO_FORMAT_v216:
      info->stride[0] = GST_ROUND_UP_8 (width * 4);
      info->offset[0] = 0;
      break;
    case GST_VIDEO_FORMAT_GRAY8:
    case GST_VIDEO_FORMAT_Y800:
      info->stride[0] = GST_ROUND_UP_4 (width);
      info->offset[0] = 0;
      break;
    case GST_VIDEO_FORMAT_GRAY16_BE:
    case GST_VIDEO_FORMAT_GRAY16_LE:
    case GST_VIDEO_FORMAT_Y16:
      info->stride[0] = GST_ROUND_UP_4 (width * 2);
      info->offset[0] = 0;
      break;
      /* UYVP: 10-bit packed 4:2:2, 5 bytes per 2 pixels */
    case GST_VIDEO_FORMAT_UYVP:
      info->stride[0] = GST_ROUND_UP_4 ((width * 2 * 5 + 3) / 4);
      info->offset[0] = 0;
      break;
    case GST_VIDEO_FORMAT_RGB8_PALETTED:
      info->stride[0] = GST_ROUND_UP_4 (width);
      info->offset[0] = 0;
      break;
      /* IYU1: packed 4:1:1 */
    case GST_VIDEO_FORMAT_IYU1:
      info->stride[0] = GST_ROUND_UP_4 (GST_ROUND_UP_4 (width) +
          GST_ROUND_UP_4 (width) / 2);
      info->offset[0] = 0;
      break;
      /* packed 64-bit formats */
    case GST_VIDEO_FORMAT_ARGB64:
    case GST_VIDEO_FORMAT_AYUV64:
      info->stride[0] = width * 8;
      info->offset[0] = 0;
      break;
      /* planar 4:2:0 */
    case GST_VIDEO_FORMAT_I420:
      info->stride[0] = GST_ROUND_UP_4 (width);
      info->stride[1] = GST_ROUND_UP_4 (GST_ROUND_UP_2 (width) / 2);
      info->stride[2] = info->stride[1];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * GST_ROUND_UP_2 (height);
      info->offset[2] = info->offset[1] +
          info->stride[1] * (GST_ROUND_UP_2 (height) / 2);
      break;
    case GST_VIDEO_FORMAT_YV12:        /* same as I420, but plane 1+2 swapped */
      info->stride[0] = GST_ROUND_UP_4 (width);
      info->stride[1] = GST_ROUND_UP_4 (GST_ROUND_UP_2 (width) / 2);
      info->stride[2] = info->stride[1];
      info->offset[0] = 0;
      info->offset[2] = info->stride[0] * GST_ROUND_UP_2 (height);
      info->offset[1] = info->offset[2] +
          info->stride[1] * (GST_ROUND_UP_2 (height) / 2);
      break;
      /* planar 4:1:1 */
    case GST_VIDEO_FORMAT_Y41B:
      info->stride[0] = GST_ROUND_UP_4 (width);
      info->stride[1] = GST_ROUND_UP_16 (width) / 4;
      info->stride[2] = info->stride[1];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * height;
      info->offset[2] = info->offset[1] + info->stride[1] * height;
      /* simplification of ROUNDUP4(w)*h + 2*((ROUNDUP16(w)/4)*h) */
      break;
      /* planar 4:2:2 */
    case GST_VIDEO_FORMAT_Y42B:
      info->stride[0] = GST_ROUND_UP_4 (width);
      info->stride[1] = GST_ROUND_UP_8 (width) / 2;
      info->stride[2] = info->stride[1];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * height;
      info->offset[2] = info->offset[1] + info->stride[1] * height;
      /* simplification of ROUNDUP4(w)*h + 2*(ROUNDUP8(w)/2)*h */
      break;
      /* planar 4:4:4 */
    case GST_VIDEO_FORMAT_Y444:
      info->stride[0] = GST_ROUND_UP_4 (width);
      info->stride[1] = info->stride[0];
      info->stride[2] = info->stride[0];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * height;
      info->offset[2] = info->offset[1] * 2;
      break;
      /* semi-planar 4:2:0 */
    case GST_VIDEO_FORMAT_NV12:
    case GST_VIDEO_FORMAT_NV21:
      info->stride[0] = GST_ROUND_UP_4 (width);
      info->stride[1] = info->stride[0];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * GST_ROUND_UP_2 (height);
      break;
      /* planar 4:2:0 with full-resolution alpha plane */
    case GST_VIDEO_FORMAT_A420:
      info->stride[0] = GST_ROUND_UP_4 (width);
      info->stride[1] = GST_ROUND_UP_4 (GST_ROUND_UP_2 (width) / 2);
      info->stride[2] = info->stride[1];
      info->stride[3] = info->stride[0];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * GST_ROUND_UP_2 (height);
      info->offset[2] = info->offset[1] +
          info->stride[1] * (GST_ROUND_UP_2 (height) / 2);
      info->offset[3] = info->offset[2] +
          info->stride[2] * (GST_ROUND_UP_2 (height) / 2);
      break;
      /* planar 4:1:0 */
    case GST_VIDEO_FORMAT_YUV9:
      info->stride[0] = GST_ROUND_UP_4 (width);
      info->stride[1] = GST_ROUND_UP_4 (GST_ROUND_UP_4 (width) / 4);
      info->stride[2] = info->stride[1];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * height;
      info->offset[2] = info->offset[1] +
          info->stride[1] * (GST_ROUND_UP_4 (height) / 4);
      break;
    case GST_VIDEO_FORMAT_YVU9:        /* same as YUV9, planes 1+2 swapped */
      info->stride[0] = GST_ROUND_UP_4 (width);
      info->stride[1] = GST_ROUND_UP_4 (GST_ROUND_UP_4 (width) / 4);
      info->stride[2] = info->stride[1];
      info->offset[0] = 0;
      info->offset[2] = info->stride[0] * height;
      info->offset[1] = info->offset[2] +
          info->stride[1] * (GST_ROUND_UP_4 (height) / 4);
      break;
      /* 10-bit planar 4:2:0; these cases also fill info->size */
    case GST_VIDEO_FORMAT_I420_10LE:
    case GST_VIDEO_FORMAT_I420_10BE:
      info->stride[0] = GST_ROUND_UP_4 (width * 2);
      info->stride[1] = GST_ROUND_UP_4 (width);
      info->stride[2] = info->stride[1];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * GST_ROUND_UP_2 (height);
      info->offset[2] = info->offset[1] +
          info->stride[1] * (GST_ROUND_UP_2 (height) / 2);
      info->size = info->offset[2] +
          info->stride[2] * (GST_ROUND_UP_2 (height) / 2);
      break;
      /* 10-bit planar 4:2:2 */
    case GST_VIDEO_FORMAT_I422_10LE:
    case GST_VIDEO_FORMAT_I422_10BE:
      info->stride[0] = GST_ROUND_UP_4 (width * 2);
      info->stride[1] = GST_ROUND_UP_4 (width);
      info->stride[2] = info->stride[1];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * GST_ROUND_UP_2 (height);
      info->offset[2] = info->offset[1] +
          info->stride[1] * GST_ROUND_UP_2 (height);
      info->size = info->offset[2] + info->stride[2] * GST_ROUND_UP_2 (height);
      break;
      /* 10-bit planar 4:4:4 */
    case GST_VIDEO_FORMAT_Y444_10LE:
    case GST_VIDEO_FORMAT_Y444_10BE:
      info->stride[0] = GST_ROUND_UP_4 (width * 2);
      info->stride[1] = info->stride[0];
      info->stride[2] = info->stride[0];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * height;
      info->offset[2] = info->offset[1] * 2;
      info->size = info->stride[0] * height * 3;
      break;
    case GST_VIDEO_FORMAT_UNKNOWN:
    default:
      GST_ERROR ("invalid format");
      g_warning ("invalid format");
      break;
  }
  return 0;
}
/* Sink pad setcaps handler (0.10-style): reads format/size/framerate/PAR
 * from @caps into th_info, resets the encoder, and marks it initialised.
 * Always returns TRUE.
 * NOTE(review): the gst_structure_get_* return values are unchecked —
 * presumably the pad template guarantees these fields are present. */
static gboolean
theora_enc_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  GstStructure *structure = gst_caps_get_structure (caps, 0);
  /* gst_pad_get_parent() takes a ref; released via gst_object_unref below */
  GstTheoraEnc *enc = GST_THEORA_ENC (gst_pad_get_parent (pad));
  guint32 fourcc;
  const GValue *par;
  gint fps_n, fps_d;

  gst_structure_get_fourcc (structure, "format", &fourcc);
  gst_structure_get_int (structure, "width", &enc->width);
  gst_structure_get_int (structure, "height", &enc->height);
  gst_structure_get_fraction (structure, "framerate", &fps_n, &fps_d);
  par = gst_structure_get_value (structure, "pixel-aspect-ratio");

  th_info_clear (&enc->info);
  th_info_init (&enc->info);
  /* Theora has a divisible-by-sixteen restriction for the encoded video size
   * but we can define a picture area using pic_width/pic_height */
  enc->info.frame_width = GST_ROUND_UP_16 (enc->width);
  enc->info.frame_height = GST_ROUND_UP_16 (enc->height);
  enc->info.pic_width = enc->width;
  enc->info.pic_height = enc->height;
  switch (fourcc) {
    case GST_MAKE_FOURCC ('I', '4', '2', '0'):
      enc->info.pixel_fmt = TH_PF_420;
      break;
    case GST_MAKE_FOURCC ('Y', '4', '2', 'B'):
      enc->info.pixel_fmt = TH_PF_422;
      break;
    case GST_MAKE_FOURCC ('Y', '4', '4', '4'):
      enc->info.pixel_fmt = TH_PF_444;
      break;
    default:
      g_assert_not_reached ();
  }

  enc->info.fps_numerator = enc->fps_n = fps_n;
  enc->info.fps_denominator = enc->fps_d = fps_d;
  if (par) {
    enc->info.aspect_numerator = gst_value_get_fraction_numerator (par);
    enc->info.aspect_denominator = gst_value_get_fraction_denominator (par);
  } else {
    /* setting them to 0 indicates that the decoder can chose a good aspect
     * ratio, defaulting to 1/1 */
    enc->info.aspect_numerator = 0;
    enc->info.aspect_denominator = 0;
  }

  enc->info.colorspace = TH_CS_UNSPECIFIED;

  /* as done in theora */
  enc->info.keyframe_granule_shift = _ilog (enc->keyframe_force - 1);
  GST_DEBUG_OBJECT (enc, "keyframe_frequency_force is %d, granule shift is %d",
      enc->keyframe_force, enc->info.keyframe_granule_shift);

  theora_enc_reset (enc);
  enc->initialised = TRUE;

  gst_object_unref (enc);

  return TRUE;
}
/* gst_msdkdec_set_src_caps:
 * Build and install the decoder's output state/caps from the MSDK frame
 * parameters (crop size for negotiation), and maintain the allocation caps
 * (rounded-up coded size) used for buffer-pool negotiation.
 *
 * @need_allocation: when TRUE, recompute allocation caps from the MSDK
 * aligned Width/Height; when FALSE, keep the previously configured ones to
 * avoid pool renegotiation.
 *
 * Returns TRUE on success.
 */
static gboolean
gst_msdkdec_set_src_caps (GstMsdkDec * thiz, gboolean need_allocation)
{
  GstVideoCodecState *output_state;
  GstVideoInfo *vinfo;
  GstVideoAlignment align;
  GstCaps *allocation_caps = NULL;
  GstVideoFormat format;
  guint width, height;
  const gchar *format_str;

  /* use display width and display height in the output state, which
   * will be used for caps negotiation */
  width =
      thiz->param.mfx.FrameInfo.CropW ? thiz->param.mfx.FrameInfo.CropW :
      GST_VIDEO_INFO_WIDTH (&thiz->input_state->info);
  height =
      thiz->param.mfx.FrameInfo.CropH ? thiz->param.mfx.FrameInfo.CropH :
      GST_VIDEO_INFO_HEIGHT (&thiz->input_state->info);

  format =
      gst_msdk_get_video_format_from_mfx_fourcc (thiz->param.mfx.
      FrameInfo.FourCC);
  if (format == GST_VIDEO_FORMAT_UNKNOWN) {
    /* no trailing '\n': the GStreamer logging system appends its own */
    GST_WARNING_OBJECT (thiz, "Failed to find a valid video format");
    return FALSE;
  }

  output_state =
      gst_video_decoder_set_output_state (GST_VIDEO_DECODER (thiz),
      format, width, height, thiz->input_state);
  if (!output_state)
    return FALSE;

  /* Ensure output_state->caps and info have the same width and height.
   * Also mandate the 32 bit alignment */
  vinfo = &output_state->info;
  gst_msdk_set_video_alignment (vinfo, &align);
  gst_video_info_align (vinfo, &align);
  output_state->caps = gst_video_info_to_caps (vinfo);
  if (srcpad_can_dmabuf (thiz))
    gst_caps_set_features (output_state->caps, 0,
        gst_caps_features_new (GST_CAPS_FEATURE_MEMORY_DMABUF, NULL));
  thiz->output_info = output_state->info;

  if (need_allocation) {
    /* Find allocation width and height */
    width =
        GST_ROUND_UP_16 (thiz->param.mfx.FrameInfo.Width ? thiz->param.mfx.
        FrameInfo.Width : GST_VIDEO_INFO_WIDTH (&output_state->info));
    height =
        GST_ROUND_UP_32 (thiz->param.mfx.FrameInfo.Height ? thiz->param.mfx.
        FrameInfo.Height : GST_VIDEO_INFO_HEIGHT (&output_state->info));

    /* set allocation width and height in allocation_caps,
     * which may or may not be similar to the output_state caps */
    allocation_caps = gst_caps_copy (output_state->caps);
    format_str =
        gst_video_format_to_string (GST_VIDEO_INFO_FORMAT
        (&thiz->output_info));
    gst_caps_set_simple (allocation_caps, "width", G_TYPE_INT, width, "height",
        G_TYPE_INT, height, "format", G_TYPE_STRING, format_str, NULL);
    GST_INFO_OBJECT (thiz, "new alloc caps = %" GST_PTR_FORMAT,
        allocation_caps);
    gst_caps_replace (&thiz->allocation_caps, allocation_caps);
  } else {
    /* We keep the allocation parameters as they are to avoid pool
     * renegotiation. For codecs like VP9, a dynamic resolution change doesn't
     * require an allocation reset if the new video frame resolution is lower
     * than the already configured one.
     * Guard the copy: gst_caps_copy() on a NULL caps raises a GLib critical;
     * a NULL allocation_caps is handled fine by gst_caps_replace() below. */
    allocation_caps =
        thiz->allocation_caps ? gst_caps_copy (thiz->allocation_caps) : NULL;
  }

  gst_caps_replace (&output_state->allocation_caps, allocation_caps);
  if (allocation_caps)
    gst_caps_unref (allocation_caps);

  gst_video_codec_state_unref (output_state);
  return TRUE;
}
/* gst_msdkdec_init_decoder:
 * One-time initialization of the Media SDK decoder: choose the IO pattern,
 * round the frame size for allocator alignment, fill missing FrameInfo
 * fields, validate the parameters with MFXVideoDECODE_Query, size and
 * (for video memory) allocate the surface pool, then run
 * MFXVideoDECODE_Init/GetVideoParam and reset the task array.
 *
 * Called with no locks held; takes and releases the object lock internally.
 * Returns TRUE on success (or if already initialized), FALSE on failure.
 */
static gboolean
gst_msdkdec_init_decoder (GstMsdkDec * thiz)
{
  GstVideoInfo *info;
  mfxSession session;
  mfxStatus status;
  mfxFrameAllocRequest request;

  if (thiz->initialized)
    return TRUE;

  if (!thiz->context) {
    GST_WARNING_OBJECT (thiz, "No MSDK Context");
    return FALSE;
  }

  if (!thiz->input_state) {
    GST_DEBUG_OBJECT (thiz, "Have no input state yet");
    return FALSE;
  }
  info = &thiz->input_state->info;

  GST_OBJECT_LOCK (thiz);

  if (thiz->use_video_memory) {
    gst_msdk_set_frame_allocator (thiz->context);
    thiz->param.IOPattern = MFX_IOPATTERN_OUT_VIDEO_MEMORY;
  } else {
    thiz->param.IOPattern = MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
  }
  GST_INFO_OBJECT (thiz, "This MSDK decoder uses %s memory",
      thiz->use_video_memory ? "video" : "system");

  thiz->param.AsyncDepth = thiz->async_depth;

  /* We expect msdk to fill the width and height values.
   * Use the failed: path instead of g_return_val_if_fail(): the previous
   * code returned here while still holding the object lock. */
  if (G_UNLIKELY (!thiz->param.mfx.FrameInfo.Width
          || !thiz->param.mfx.FrameInfo.Height)) {
    GST_ERROR_OBJECT (thiz, "MSDK did not report frame width/height");
    goto failed;
  }

  /* Force 32 bit rounding to avoid messing up of memory alignment when
   * dealing with different allocators */
  /* Fixme: msdk sometimes only requires 16 bit rounding, optimization possible */
  thiz->param.mfx.FrameInfo.Width =
      GST_ROUND_UP_16 (thiz->param.mfx.FrameInfo.Width);
  thiz->param.mfx.FrameInfo.Height =
      GST_ROUND_UP_32 (thiz->param.mfx.FrameInfo.Height);

  /* Set framerate only if provided.
   * If not, framerate will be assumed inside the driver.
   * Also we respect the upstream provided fps values: update when EITHER
   * component differs (the previous '&&' skipped the update when only one
   * of numerator/denominator changed). */
  if (info->fps_n > 0 && info->fps_d > 0
      && (info->fps_n != thiz->param.mfx.FrameInfo.FrameRateExtN
          || info->fps_d != thiz->param.mfx.FrameInfo.FrameRateExtD)) {
    thiz->param.mfx.FrameInfo.FrameRateExtN = info->fps_n;
    thiz->param.mfx.FrameInfo.FrameRateExtD = info->fps_d;
  }

  /* Take the upstream pixel-aspect-ratio only when the bitstream did not
   * already provide one. */
  if (info->par_n && info->par_d && !thiz->param.mfx.FrameInfo.AspectRatioW
      && !thiz->param.mfx.FrameInfo.AspectRatioH) {
    thiz->param.mfx.FrameInfo.AspectRatioW = info->par_n;
    thiz->param.mfx.FrameInfo.AspectRatioH = info->par_d;
  }

  /* Default any FrameInfo fields the parser left at zero. */
  thiz->param.mfx.FrameInfo.PicStruct =
      thiz->param.mfx.FrameInfo.PicStruct ? thiz->param.mfx.
      FrameInfo.PicStruct : MFX_PICSTRUCT_PROGRESSIVE;
  thiz->param.mfx.FrameInfo.FourCC =
      thiz->param.mfx.FrameInfo.FourCC ? thiz->param.mfx.FrameInfo.FourCC :
      MFX_FOURCC_NV12;
  thiz->param.mfx.FrameInfo.ChromaFormat =
      thiz->param.mfx.FrameInfo.ChromaFormat ? thiz->param.mfx.
      FrameInfo.ChromaFormat : MFX_CHROMAFORMAT_YUV420;

  session = gst_msdk_context_get_session (thiz->context);
  /* validate parameters and allow the Media SDK to make adjustments */
  status = MFXVideoDECODE_Query (session, &thiz->param, &thiz->param);
  if (status < MFX_ERR_NONE) {
    GST_ERROR_OBJECT (thiz, "Video Decode Query failed (%s)",
        msdk_status_to_string (status));
    goto failed;
  } else if (status > MFX_ERR_NONE) {
    GST_WARNING_OBJECT (thiz, "Video Decode Query returned: %s",
        msdk_status_to_string (status));
  }

  /* Force the structure to MFX_PICSTRUCT_PROGRESSIVE if it is unknown to
   * work-around MSDK issue:
   * https://github.com/Intel-Media-SDK/MediaSDK/issues/1139 */
  if (thiz->param.mfx.FrameInfo.PicStruct == MFX_PICSTRUCT_UNKNOWN)
    thiz->param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;

  status = MFXVideoDECODE_QueryIOSurf (session, &thiz->param, &request);
  if (status < MFX_ERR_NONE) {
    GST_ERROR_OBJECT (thiz, "Query IO surfaces failed (%s)",
        msdk_status_to_string (status));
    goto failed;
  } else if (status > MFX_ERR_NONE) {
    GST_WARNING_OBJECT (thiz, "Query IO surfaces returned: %s",
        msdk_status_to_string (status));
  }

  if (request.NumFrameSuggested < thiz->param.AsyncDepth) {
    GST_ERROR_OBJECT (thiz, "Required %d surfaces (%d suggested), async %d",
        request.NumFrameMin, request.NumFrameSuggested,
        thiz->param.AsyncDepth);
    goto failed;
  }

  /* account the downstream requirement */
  if (G_LIKELY (thiz->min_prealloc_buffers))
    request.NumFrameSuggested += thiz->min_prealloc_buffers;
  else
    GST_WARNING_OBJECT (thiz,
        "Allocating resources without considering the downstream requirement "
        "or extra scratch surface count");

  if (thiz->use_video_memory) {
    gint shared_async_depth;

    /* surfaces may be shared with other elements on the same context */
    shared_async_depth =
        gst_msdk_context_get_shared_async_depth (thiz->context);
    request.NumFrameSuggested += shared_async_depth;

    request.Type |= MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
    if (thiz->use_dmabuf)
      request.Type |= MFX_MEMTYPE_EXPORT_FRAME;
    gst_msdk_frame_alloc (thiz->context, &request, &thiz->alloc_resp);
  }

  /* update the prealloc_buffer count which will be used later
   * as GstBufferPool min_buffers */
  thiz->min_prealloc_buffers = request.NumFrameSuggested;

  GST_DEBUG_OBJECT (thiz, "Required %d surfaces (%d suggested)",
      request.NumFrameMin, request.NumFrameSuggested);

  status = MFXVideoDECODE_Init (session, &thiz->param);
  if (status < MFX_ERR_NONE) {
    GST_ERROR_OBJECT (thiz, "Init failed (%s)",
        msdk_status_to_string (status));
    goto failed;
  } else if (status > MFX_ERR_NONE) {
    GST_WARNING_OBJECT (thiz, "Init returned: %s",
        msdk_status_to_string (status));
  }

  status = MFXVideoDECODE_GetVideoParam (session, &thiz->param);
  if (status < MFX_ERR_NONE) {
    GST_ERROR_OBJECT (thiz, "Get Video Parameters failed (%s)",
        msdk_status_to_string (status));
    goto failed;
  } else if (status > MFX_ERR_NONE) {
    GST_WARNING_OBJECT (thiz, "Get Video Parameters returned: %s",
        msdk_status_to_string (status));
  }

  g_array_set_size (thiz->tasks, 0);    /* resets array content */
  g_array_set_size (thiz->tasks, thiz->param.AsyncDepth);
  thiz->next_task = 0;

  GST_OBJECT_UNLOCK (thiz);

  thiz->initialized = TRUE;
  return TRUE;

failed:
  GST_OBJECT_UNLOCK (thiz);
  return FALSE;
}