int ff_qsv_decode_init(AVCodecContext *avctx, QSVContext *q)
{
    mfxVideoParam param = { { 0 } };
    int ret;

    q->iopattern = MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
    if (avctx->hwaccel_context) {
        AVQSVContext *qsv = avctx->hwaccel_context;

        q->session        = qsv->session;
        q->iopattern      = qsv->iopattern;
        q->ext_buffers    = qsv->ext_buffers;
        q->nb_ext_buffers = qsv->nb_ext_buffers;
    }
    if (!q->session) {
        ret = ff_qsv_init_internal_session(avctx, &q->internal_qs, NULL);
        if (ret < 0)
            return ret;

        q->session = q->internal_qs.session;
    }

    ret = ff_qsv_codec_id_to_mfx(avctx->codec_id);
    if (ret < 0)
        return ret;

    param.mfx.CodecId      = ret;
    param.mfx.CodecProfile = avctx->profile;
    param.mfx.CodecLevel   = avctx->level;

    param.mfx.FrameInfo.BitDepthLuma   = 8;
    param.mfx.FrameInfo.BitDepthChroma = 8;
    param.mfx.FrameInfo.Shift          = 0;
    param.mfx.FrameInfo.FourCC         = MFX_FOURCC_NV12;
    param.mfx.FrameInfo.Width          = avctx->coded_width;
    param.mfx.FrameInfo.Height         = avctx->coded_height;
    param.mfx.FrameInfo.ChromaFormat   = MFX_CHROMAFORMAT_YUV420;

    param.IOPattern   = q->iopattern;
    param.AsyncDepth  = q->async_depth;
    param.ExtParam    = q->ext_buffers;
    param.NumExtParam = q->nb_ext_buffers;

    ret = MFXVideoDECODE_Init(q->session, &param);
    if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Error initializing the MFX video decoder\n");
        return ff_qsv_error(ret);
    }

    return 0;
}
static int qsv_decode_init(AVCodecContext *avctx, QSVContext *q, mfxSession session)
{
    mfxVideoParam param = { { 0 } };
    int ret;

    if (!q->async_fifo) {
        q->async_fifo = av_fifo_alloc((1 + q->async_depth) *
                                      (sizeof(mfxSyncPoint*) + sizeof(QSVFrame*)));
        if (!q->async_fifo)
            return AVERROR(ENOMEM);
    }

    ret = qsv_init_session(avctx, q, session);
    if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Error initializing an MFX session\n");
        return ret;
    }

    ret = ff_qsv_codec_id_to_mfx(avctx->codec_id);
    if (ret < 0)
        return ret;

    param.mfx.CodecId      = ret;
    param.mfx.CodecProfile = avctx->profile;
    param.mfx.CodecLevel   = avctx->level;

    param.mfx.FrameInfo.BitDepthLuma   = 8;
    param.mfx.FrameInfo.BitDepthChroma = 8;
    param.mfx.FrameInfo.Shift          = 0;
    param.mfx.FrameInfo.FourCC         = MFX_FOURCC_NV12;
    param.mfx.FrameInfo.Width          = avctx->coded_width;
    param.mfx.FrameInfo.Height         = avctx->coded_height;
    param.mfx.FrameInfo.ChromaFormat   = MFX_CHROMAFORMAT_YUV420;

    param.IOPattern   = q->iopattern;
    param.AsyncDepth  = q->async_depth;
    param.ExtParam    = q->ext_buffers;
    param.NumExtParam = q->nb_ext_buffers;

    ret = MFXVideoDECODE_Init(q->session, &param);
    if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Error initializing the MFX video decoder\n");
        return ff_qsv_error(ret);
    }

    return 0;
}
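/*
 * Both FFmpeg variants above size async_fifo for (1 + async_depth) pairs of
 * sync-point and frame pointers. A minimal sketch of the produce/consume
 * pattern such a fifo serves; this is not FFmpeg's actual decode loop, and
 * the helper names are hypothetical. AVFifoBuffer and QSVFrame are the types
 * already used in the snippets above.
 */
#include <libavutil/error.h>
#include <libavutil/fifo.h>
#include <mfxvideo.h>

/* Producer: remember a frame whose decode was submitted asynchronously. */
static void queue_async_frame(AVFifoBuffer *fifo, mfxSyncPoint *sync, QSVFrame *frame)
{
    av_fifo_generic_write(fifo, &sync,  sizeof(sync),  NULL);
    av_fifo_generic_write(fifo, &frame, sizeof(frame), NULL);
}

/* Consumer: pop the oldest entry and wait for the hardware to finish it. */
static int dequeue_async_frame(mfxSession session, AVFifoBuffer *fifo, QSVFrame **out)
{
    mfxSyncPoint *sync;
    QSVFrame *frame;

    if (av_fifo_size(fifo) < (int) (sizeof(sync) + sizeof(frame)))
        return AVERROR(EAGAIN);                       /* nothing queued yet */

    av_fifo_generic_read(fifo, &sync,  sizeof(sync),  NULL);
    av_fifo_generic_read(fifo, &frame, sizeof(frame), NULL);

    MFXVideoCORE_SyncOperation(session, *sync, 1000); /* block, 1s timeout */
    *out = frame;
    return 0;
}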
size_t IntelH264Decoder::Reset(const void* buf, size_t bufSize)
{
    mfxVideoParam par = { 0 };
    par.mfx.CodecId = MFX_CODEC_AVC;

    mfxBitstream bs = { 0 };
    bs.Data            = (uint8_t*) buf;
    bs.DataOffset      = 0;
    bs.DataLength      = (mfxU32) bufSize;
    bs.MaxLength       = (mfxU32) bufSize;
    bs.DecodeTimeStamp = MFX_TIMESTAMP_UNKNOWN;
    bs.TimeStamp       = MFX_TIMESTAMP_UNKNOWN;

    mfxStatus err = MFXVideoDECODE_DecodeHeader(Session, &bs, &par);
    if (err == MFX_ERR_NONE) {
        par.IOPattern = MFX_IOPATTERN_IN_SYSTEM_MEMORY | MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
        err = MFXVideoDECODE_Init(Session, &par);
        if (err == MFX_ERR_NONE) {
            VideoParam = par;
            CreateWorkSurface(WorkSurface);
            IsInitialized = true;
        } else {
            Log()->Error("MFXVideoDECODE_Init failed: %d", err);
        }
    }

    // Report how many bytes the header parser consumed.
    return bs.DataOffset;
}
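/*
 * CreateWorkSurface() is this decoder's own helper and its body is not shown
 * above. A minimal sketch of what such a helper might do for the
 * system-memory NV12 output negotiated here; the allocation scheme (malloc,
 * no extra pitch alignment) is an assumption, not the decoder's actual code.
 */
#include <stdint.h>
#include <stdlib.h>
#include <string.h>
#include <mfxvideo.h>

static int create_work_surface(const mfxVideoParam *par, mfxFrameSurface1 *surface)
{
    mfxU16 pitch  = par->mfx.FrameInfo.Width;   /* assumes no extra alignment */
    mfxU32 height = par->mfx.FrameInfo.Height;

    /* NV12: full-size luma plane plus interleaved chroma at half height. */
    uint8_t *base = (uint8_t*) malloc((size_t) pitch * height * 3 / 2);
    if (!base)
        return -1;

    memset(surface, 0, sizeof(*surface));
    surface->Info       = par->mfx.FrameInfo;   /* copy negotiated geometry */
    surface->Data.Y     = base;
    surface->Data.UV    = base + (size_t) pitch * height;
    surface->Data.Pitch = pitch;
    return 0;
}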
static int qsv_decode_init(AVCodecContext *avctx, QSVContext *q)
{
    const AVPixFmtDescriptor *desc;
    mfxSession session = NULL;
    int iopattern = 0;
    mfxVideoParam param = { { 0 } };
    int frame_width  = avctx->coded_width;
    int frame_height = avctx->coded_height;
    int ret;

    desc = av_pix_fmt_desc_get(avctx->sw_pix_fmt);
    if (!desc)
        return AVERROR_BUG;

    if (!q->async_fifo) {
        q->async_fifo = av_fifo_alloc((1 + q->async_depth) *
                                      (sizeof(mfxSyncPoint*) + sizeof(QSVFrame*)));
        if (!q->async_fifo)
            return AVERROR(ENOMEM);
    }

    if (avctx->pix_fmt == AV_PIX_FMT_QSV && avctx->hwaccel_context) {
        AVQSVContext *user_ctx = avctx->hwaccel_context;

        session           = user_ctx->session;
        iopattern         = user_ctx->iopattern;
        q->ext_buffers    = user_ctx->ext_buffers;
        q->nb_ext_buffers = user_ctx->nb_ext_buffers;
    }

    if (avctx->hw_frames_ctx) {
        AVHWFramesContext  *frames_ctx   = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
        AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;

        if (!iopattern) {
            if (frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME)
                iopattern = MFX_IOPATTERN_OUT_OPAQUE_MEMORY;
            else if (frames_hwctx->frame_type & MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET)
                iopattern = MFX_IOPATTERN_OUT_VIDEO_MEMORY;
        }

        frame_width  = frames_hwctx->surfaces[0].Info.Width;
        frame_height = frames_hwctx->surfaces[0].Info.Height;
    }

    if (!iopattern)
        iopattern = MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
    q->iopattern = iopattern;

    ret = qsv_init_session(avctx, q, session, avctx->hw_frames_ctx);
    if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Error initializing an MFX session\n");
        return ret;
    }

    ret = ff_qsv_codec_id_to_mfx(avctx->codec_id);
    if (ret < 0)
        return ret;

    param.mfx.CodecId      = ret;
    param.mfx.CodecProfile = avctx->profile;
    param.mfx.CodecLevel   = avctx->level;

    param.mfx.FrameInfo.BitDepthLuma   = desc->comp[0].depth;
    param.mfx.FrameInfo.BitDepthChroma = desc->comp[0].depth;
    param.mfx.FrameInfo.Shift          = desc->comp[0].depth > 8;
    param.mfx.FrameInfo.FourCC         = q->fourcc;
    param.mfx.FrameInfo.Width          = frame_width;
    param.mfx.FrameInfo.Height         = frame_height;
    param.mfx.FrameInfo.ChromaFormat   = MFX_CHROMAFORMAT_YUV420;

    param.IOPattern   = q->iopattern;
    param.AsyncDepth  = q->async_depth;
    param.ExtParam    = q->ext_buffers;
    param.NumExtParam = q->nb_ext_buffers;

    ret = MFXVideoDECODE_Init(q->session, &param);
    if (ret < 0)
        return ff_qsv_print_error(avctx, ret,
                                  "Error initializing the MFX video decoder");

    q->frame_info = param.mfx.FrameInfo;

    return 0;
}
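/*
 * The hw_frames_ctx branch above expects the caller to have set up QSV
 * hardware frames. A hedged sketch of one common way to get there with the
 * public libavutil API: create a QSV device and hand it to the decoder,
 * which can then derive avctx->hw_frames_ctx when the QSV pixel format is
 * chosen in get_format. attach_qsv_device is a hypothetical helper name.
 */
#include <libavcodec/avcodec.h>
#include <libavutil/hwcontext.h>

static int attach_qsv_device(AVCodecContext *avctx)
{
    AVBufferRef *device_ref = NULL;
    int ret = av_hwdevice_ctx_create(&device_ref, AV_HWDEVICE_TYPE_QSV,
                                     NULL, NULL, 0);
    if (ret < 0)
        return ret;

    avctx->hw_device_ctx = device_ref;  /* ownership passes to the codec ctx */
    return 0;
}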
int ff_qsv_decode_init(AVCodecContext *avctx, QSVContext *q, AVPacket *avpkt)
{
    mfxVideoParam param = { { 0 } };
    mfxBitstream bs = { { { 0 } } };
    int ret;
    enum AVPixelFormat pix_fmts[3] = { AV_PIX_FMT_QSV,
                                       AV_PIX_FMT_NV12,
                                       AV_PIX_FMT_NONE };

    q->iopattern = MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
    if (!q->session) {
        if (avctx->hwaccel_context) {
            AVQSVContext *qsv = avctx->hwaccel_context;

            q->session        = qsv->session;
            q->iopattern      = qsv->iopattern;
            q->ext_buffers    = qsv->ext_buffers;
            q->nb_ext_buffers = qsv->nb_ext_buffers;
        }
        if (!q->session) {
            ret = ff_qsv_init_internal_session(avctx, &q->internal_qs,
                                               q->load_plugins);
            if (ret < 0)
                return ret;

            q->session = q->internal_qs.session;
        }
    }

    if (avpkt->size) {
        bs.Data       = avpkt->data;
        bs.DataLength = avpkt->size;
        bs.MaxLength  = bs.DataLength;
        bs.TimeStamp  = avpkt->pts;
    } else
        return AVERROR_INVALIDDATA;

    ret = ff_qsv_codec_id_to_mfx(avctx->codec_id);
    if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Unsupported codec_id %08x\n", avctx->codec_id);
        return ret;
    }

    param.mfx.CodecId = ret;

    ret = MFXVideoDECODE_DecodeHeader(q->session, &bs, &param);
    if (MFX_ERR_MORE_DATA == ret) {
        /* The header was not found, so return the packet size to skip
         * the current packet. */
        return avpkt->size;
    } else if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Decode header error %d\n", ret);
        return ff_qsv_error(ret);
    }

    param.IOPattern   = q->iopattern;
    param.AsyncDepth  = q->async_depth;
    param.ExtParam    = q->ext_buffers;
    param.NumExtParam = q->nb_ext_buffers;
    param.mfx.FrameInfo.BitDepthLuma   = 8;
    param.mfx.FrameInfo.BitDepthChroma = 8;

    ret = MFXVideoDECODE_Init(q->session, &param);
    if (ret < 0) {
        if (MFX_ERR_INVALID_VIDEO_PARAM == ret) {
            av_log(avctx, AV_LOG_ERROR,
                   "Error initializing the MFX video decoder, unsupported video\n");
        } else {
            av_log(avctx, AV_LOG_ERROR,
                   "Error initializing the MFX video decoder %d\n", ret);
        }
        return ff_qsv_error(ret);
    }

    ret = ff_get_format(avctx, pix_fmts);
    if (ret < 0)
        return ret;

    avctx->pix_fmt      = ret;
    avctx->profile      = param.mfx.CodecProfile;
    avctx->level        = param.mfx.CodecLevel;
    avctx->coded_width  = param.mfx.FrameInfo.Width;
    avctx->coded_height = param.mfx.FrameInfo.Height;
    avctx->width        = param.mfx.FrameInfo.CropW - param.mfx.FrameInfo.CropX;
    avctx->height       = param.mfx.FrameInfo.CropH - param.mfx.FrameInfo.CropY;

    /* The maximum decoder latency should not exceed the max DPB size for
     * H.264 and HEVC, which is 16 in both cases, so we pre-allocate a fifo
     * big enough for 17 elements: */
    if (!q->async_fifo) {
        q->async_fifo = av_fifo_alloc((1 + 16) *
                                      (sizeof(mfxSyncPoint) + sizeof(QSVFrame*)));
        if (!q->async_fifo)
            return AVERROR(ENOMEM);
    }

    q->input_fifo = av_fifo_alloc(1024*16);
    if (!q->input_fifo)
        return AVERROR(ENOMEM);

    q->engine_ready = 1;

    return 0;
}
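/*
 * The variant above also allocates q->input_fifo for buffering raw bitstream
 * bytes across calls. A sketch (an assumption, not the original
 * implementation) of how packet bytes might be coalesced into it and then
 * exposed to the MSDK as one contiguous mfxBitstream:
 */
#include <libavutil/fifo.h>

static int buffer_packet(QSVContext *q, const AVPacket *avpkt)
{
    int shortfall = avpkt->size - av_fifo_space(q->input_fifo);

    if (shortfall > 0 && av_fifo_grow(q->input_fifo, shortfall) < 0)
        return AVERROR(ENOMEM);              /* enlarge the fifo on demand */

    av_fifo_generic_write(q->input_fifo, avpkt->data, avpkt->size, NULL);
    return 0;
}

/* scratch must be large enough to hold av_fifo_size(q->input_fifo) bytes */
static void fifo_to_bitstream(QSVContext *q, uint8_t *scratch, mfxBitstream *bs)
{
    int size = av_fifo_size(q->input_fifo);

    av_fifo_generic_read(q->input_fifo, scratch, size, NULL); /* drain all */
    bs->Data       = scratch;
    bs->DataOffset = 0;
    bs->DataLength = size;
    bs->MaxLength  = size;
}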
int ff_qsv_decode_init(AVCodecContext *avctx, QSVContext *q, AVPacket *avpkt)
{
    mfxVideoParam param = { { 0 } };
    mfxBitstream bs = { { { 0 } } };
    int ret;

    q->iopattern = MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
    if (!q->session) {
        if (avctx->hwaccel_context) {
            AVQSVContext *qsv = avctx->hwaccel_context;

            q->session        = qsv->session;
            q->iopattern      = qsv->iopattern;
            q->ext_buffers    = qsv->ext_buffers;
            q->nb_ext_buffers = qsv->nb_ext_buffers;
        }
        if (!q->session) {
            ret = ff_qsv_init_internal_session(avctx, &q->internal_qs, NULL);
            if (ret < 0)
                return ret;

            q->session = q->internal_qs.session;
        }
    }

    if (avpkt->size) {
        bs.Data       = avpkt->data;
        bs.DataLength = avpkt->size;
        bs.MaxLength  = bs.DataLength;
        bs.TimeStamp  = avpkt->pts;
    } else
        return AVERROR_INVALIDDATA;

    ret = ff_qsv_codec_id_to_mfx(avctx->codec_id);
    if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Unsupported codec_id %08x\n", avctx->codec_id);
        return ret;
    }

    param.mfx.CodecId = ret;

    ret = MFXVideoDECODE_DecodeHeader(q->session, &bs, &param);
    if (MFX_ERR_MORE_DATA == ret) {
        return AVERROR(EAGAIN);
    } else if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Decode header error %d\n", ret);
        return ff_qsv_error(ret);
    }

    param.IOPattern   = q->iopattern;
    param.AsyncDepth  = q->async_depth;
    param.ExtParam    = q->ext_buffers;
    param.NumExtParam = q->nb_ext_buffers;
    param.mfx.FrameInfo.BitDepthLuma   = 8;
    param.mfx.FrameInfo.BitDepthChroma = 8;

    ret = MFXVideoDECODE_Init(q->session, &param);
    if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Error initializing the MFX video decoder\n");
        return ff_qsv_error(ret);
    }

    avctx->pix_fmt      = AV_PIX_FMT_NV12;
    avctx->profile      = param.mfx.CodecProfile;
    avctx->level        = param.mfx.CodecLevel;
    avctx->coded_width  = param.mfx.FrameInfo.Width;
    avctx->coded_height = param.mfx.FrameInfo.Height;
    avctx->width        = param.mfx.FrameInfo.CropW - param.mfx.FrameInfo.CropX;
    avctx->height       = param.mfx.FrameInfo.CropH - param.mfx.FrameInfo.CropY;

    q->async_fifo = av_fifo_alloc((1 + q->async_depth) *
                                  (sizeof(mfxSyncPoint) + sizeof(QSVFrame*)));
    if (!q->async_fifo)
        return AVERROR(ENOMEM);

    return 0;
}
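/*
 * The FFmpeg variants above funnel MSDK failures through ff_qsv_error(). A
 * minimal sketch of that kind of status mapping, not FFmpeg's exact table;
 * only well-known mfxStatus codes are handled here:
 */
#include <libavutil/error.h>
#include <mfxvideo.h>

static int qsv_map_error(mfxStatus err)
{
    switch (err) {
    case MFX_ERR_NONE:
        return 0;
    case MFX_ERR_MEMORY_ALLOC:
    case MFX_ERR_NOT_ENOUGH_BUFFER:
        return AVERROR(ENOMEM);
    case MFX_ERR_INVALID_HANDLE:
        return AVERROR(EINVAL);
    case MFX_ERR_MORE_DATA:
    case MFX_ERR_MORE_SURFACE:
        return AVERROR(EAGAIN);   /* decoder wants more input or surfaces */
    case MFX_ERR_UNSUPPORTED:
        return AVERROR(ENOSYS);
    default:
        return AVERROR_UNKNOWN;
    }
}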
STDMETHODIMP CDecMSDKMVC::Decode(const BYTE *buffer, int buflen, REFERENCE_TIME rtStart, REFERENCE_TIME rtStop, BOOL bSyncPoint, BOOL bDiscontinuity)
{
  if (!m_mfxSession)
    return E_UNEXPECTED;

  HRESULT hr = S_OK;
  CBitstreamBuffer bsBuffer(&m_buff);
  mfxStatus sts = MFX_ERR_NONE;
  mfxBitstream bs = { 0 };
  BOOL bFlush = (buffer == nullptr);

  if (rtStart >= -TIMESTAMP_OFFSET && rtStart != AV_NOPTS_VALUE)
    bs.TimeStamp = rtStart + TIMESTAMP_OFFSET;
  else
    bs.TimeStamp = MFX_TIMESTAMP_UNKNOWN;

  bs.DecodeTimeStamp = MFX_TIMESTAMP_UNKNOWN;

  if (!bFlush) {
    if (m_pAnnexBConverter) {
      BYTE *pOutBuffer = nullptr;
      int pOutSize = 0;
      hr = m_pAnnexBConverter->Convert(&pOutBuffer, &pOutSize, buffer, buflen);
      if (FAILED(hr))
        return hr;

      bsBuffer.SetBuffer(pOutBuffer, pOutSize, true);
    }
    else {
      bsBuffer.SetBuffer((BYTE *)buffer, buflen, false);
    }

    // Check the buffer for SEI NALUs, and some unwanted NALUs that need filtering;
    // MSDK's SEI reading functionality is slightly buggy
    CH264Nalu nalu;
    nalu.SetBuffer(bsBuffer.GetBuffer(), bsBuffer.GetBufferSize(), 0);
    BOOL bNeedFilter = FALSE;
    while (nalu.ReadNext()) {
      if (nalu.GetType() == NALU_TYPE_SEI) {
        ParseSEI(nalu.GetDataBuffer() + 1, nalu.GetDataLength() - 1, bs.TimeStamp);
      }
      else if (nalu.GetType() == NALU_TYPE_EOSEQ) {
        bsBuffer.EnsureWriteable();
        // This is rather ugly, and relies on the bitstream being AnnexB, so simply overwriting the EOS NAL with zeros works.
        // In the future a more elaborate bitstream filter might be advised.
        memset(bsBuffer.GetBuffer() + nalu.GetNALPos(), 0, 4);
      }
    }

    bs.Data = bsBuffer.GetBuffer();
    bs.DataLength = mfxU32(bsBuffer.GetBufferSize());
    bs.MaxLength = bs.DataLength;

    AddFrameToGOP(bs.TimeStamp);
  }

  if (!m_bDecodeReady) {
    sts = MFXVideoDECODE_DecodeHeader(m_mfxSession, &bs, &m_mfxVideoParams);
    if (sts == MFX_ERR_NOT_ENOUGH_BUFFER) {
      hr = AllocateMVCExtBuffers();
      if (FAILED(hr))
        return hr;

      sts = MFXVideoDECODE_DecodeHeader(m_mfxSession, &bs, &m_mfxVideoParams);
    }

    if (sts == MFX_ERR_NONE) {
      m_mfxVideoParams.IOPattern = MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
      m_mfxVideoParams.AsyncDepth = ASYNC_DEPTH;

      sts = MFXVideoDECODE_Init(m_mfxSession, &m_mfxVideoParams);
      if (sts != MFX_ERR_NONE) {
        DbgLog((LOG_TRACE, 10, L"CDecMSDKMVC::Decode(): Error initializing the MSDK decoder (%d)", sts));
        return E_FAIL;
      }

      if (m_mfxExtMVCSeq.NumView != 2) {
        DbgLog((LOG_TRACE, 10, L"CDecMSDKMVC::Decode(): Only MVC with two views is supported"));
        return E_FAIL;
      }

      DbgLog((LOG_TRACE, 10, L"CDecMSDKMVC::Decode(): Initialized MVC with View Ids %d, %d", m_mfxExtMVCSeq.View[0].ViewId, m_mfxExtMVCSeq.View[1].ViewId));

      m_bDecodeReady = TRUE;
    }
  }

  if (!m_bDecodeReady)
    return S_FALSE;

  mfxSyncPoint sync = nullptr;

  // Loop over the decoder to ensure all data is being consumed
  while (1) {
    MVCBuffer *pInputBuffer = GetBuffer();
    if (pInputBuffer == nullptr)
      return E_OUTOFMEMORY;

    mfxFrameSurface1 *outsurf = nullptr;
    sts = MFXVideoDECODE_DecodeFrameAsync(m_mfxSession, bFlush ? nullptr : &bs, &pInputBuffer->surface, &outsurf, &sync);

    if (sts == MFX_ERR_INCOMPATIBLE_VIDEO_PARAM) {
      DbgLog((LOG_TRACE, 10, L"CDecMSDKMVC::Decode(): Incompatible video parameters detected, flushing decoder"));
      bsBuffer.Clear();
      bFlush = TRUE;
      m_bDecodeReady = FALSE;
      continue;
    }

    if (sync) {
      MVCBuffer * pOutputBuffer = FindBuffer(outsurf);
      pOutputBuffer->queued = 1;
      pOutputBuffer->sync = sync;
      HandleOutput(pOutputBuffer);
      continue;
    }

    if (sts != MFX_ERR_MORE_SURFACE && sts < 0)
      break;
  }

  if (!bs.DataOffset && !sync && !bFlush) {
    DbgLog((LOG_TRACE, 10, L"CDecMSDKMVC::Decode(): Decoder did not consume any data, discarding"));
    bs.DataOffset = mfxU32(bsBuffer.GetBufferSize());
  }

  bsBuffer.Consume(bs.DataOffset);

  if (sts != MFX_ERR_MORE_DATA && sts < 0) {
    DbgLog((LOG_TRACE, 10, L"CDecMSDKMVC::Decode(): Error from Decode call (%d)", sts));
    return S_FALSE;
  }

  return S_OK;
}
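/*
 * HandleOutput() above queues buffers whose sync points are still pending.
 * A sketch of the consumer side of that queue; the MVCBuffer fields follow
 * the snippet, and DeliverSurface() is a hypothetical downstream hand-off.
 */
static mfxStatus SyncAndDeliver(mfxSession session, MVCBuffer *pBuffer)
{
  // Block until the asynchronous decode behind this sync point completes.
  mfxStatus sts = MFXVideoCORE_SyncOperation(session, pBuffer->sync, MFX_INFINITE);
  if (sts != MFX_ERR_NONE)
    return sts;

  pBuffer->sync = nullptr;            // a sync point is spent once waited on
  DeliverSurface(&pBuffer->surface);  // hypothetical: send the frame downstream
  return MFX_ERR_NONE;
}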
static gboolean
gst_msdkdec_init_decoder (GstMsdkDec * thiz)
{
  GstVideoInfo *info;
  mfxSession session;
  mfxStatus status;
  mfxFrameAllocRequest request;

  if (thiz->initialized)
    return TRUE;

  if (!thiz->context) {
    GST_WARNING_OBJECT (thiz, "No MSDK Context");
    return FALSE;
  }

  if (!thiz->input_state) {
    GST_DEBUG_OBJECT (thiz, "Have no input state yet");
    return FALSE;
  }

  info = &thiz->input_state->info;

  GST_OBJECT_LOCK (thiz);

  if (thiz->use_video_memory) {
    gst_msdk_set_frame_allocator (thiz->context);
    thiz->param.IOPattern = MFX_IOPATTERN_OUT_VIDEO_MEMORY;
  } else {
    thiz->param.IOPattern = MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
  }

  GST_INFO_OBJECT (thiz, "This MSDK decoder uses %s memory",
      thiz->use_video_memory ? "video" : "system");

  thiz->param.AsyncDepth = thiz->async_depth;

  /* We expect msdk to fill the width and height values */
  g_return_val_if_fail (thiz->param.mfx.FrameInfo.Width
      && thiz->param.mfx.FrameInfo.Height, FALSE);

  /* Force 32 bit rounding to avoid messing up the memory alignment when
   * dealing with different allocators */
  /* Fixme: msdk sometimes only requires 16 bit rounding, optimization possible */
  thiz->param.mfx.FrameInfo.Width =
      GST_ROUND_UP_16 (thiz->param.mfx.FrameInfo.Width);
  thiz->param.mfx.FrameInfo.Height =
      GST_ROUND_UP_32 (thiz->param.mfx.FrameInfo.Height);

  /* Set the framerate only if provided.
   * If not, the framerate will be assumed inside the driver.
   * Also we respect the upstream provided fps values */
  if (info->fps_n > 0 && info->fps_d > 0
      && info->fps_n != thiz->param.mfx.FrameInfo.FrameRateExtN
      && info->fps_d != thiz->param.mfx.FrameInfo.FrameRateExtD) {
    thiz->param.mfx.FrameInfo.FrameRateExtN = info->fps_n;
    thiz->param.mfx.FrameInfo.FrameRateExtD = info->fps_d;
  }

  if (info->par_n && info->par_d && !thiz->param.mfx.FrameInfo.AspectRatioW
      && !thiz->param.mfx.FrameInfo.AspectRatioH) {
    thiz->param.mfx.FrameInfo.AspectRatioW = info->par_n;
    thiz->param.mfx.FrameInfo.AspectRatioH = info->par_d;
  }

  thiz->param.mfx.FrameInfo.PicStruct =
      thiz->param.mfx.FrameInfo.PicStruct ? thiz->param.mfx.FrameInfo.PicStruct
      : MFX_PICSTRUCT_PROGRESSIVE;
  thiz->param.mfx.FrameInfo.FourCC =
      thiz->param.mfx.FrameInfo.FourCC ? thiz->param.mfx.FrameInfo.FourCC
      : MFX_FOURCC_NV12;
  thiz->param.mfx.FrameInfo.ChromaFormat =
      thiz->param.mfx.FrameInfo.ChromaFormat ?
      thiz->param.mfx.FrameInfo.ChromaFormat : MFX_CHROMAFORMAT_YUV420;

  session = gst_msdk_context_get_session (thiz->context);

  /* validate parameters and allow the Media SDK to make adjustments */
  status = MFXVideoDECODE_Query (session, &thiz->param, &thiz->param);
  if (status < MFX_ERR_NONE) {
    GST_ERROR_OBJECT (thiz, "Video Decode Query failed (%s)",
        msdk_status_to_string (status));
    goto failed;
  } else if (status > MFX_ERR_NONE) {
    GST_WARNING_OBJECT (thiz, "Video Decode Query returned: %s",
        msdk_status_to_string (status));
  }

  /* Force the structure to MFX_PICSTRUCT_PROGRESSIVE if it is unknown, to
   * work around an MSDK issue:
   * https://github.com/Intel-Media-SDK/MediaSDK/issues/1139 */
  if (thiz->param.mfx.FrameInfo.PicStruct == MFX_PICSTRUCT_UNKNOWN)
    thiz->param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;

  status = MFXVideoDECODE_QueryIOSurf (session, &thiz->param, &request);
  if (status < MFX_ERR_NONE) {
    GST_ERROR_OBJECT (thiz, "Query IO surfaces failed (%s)",
        msdk_status_to_string (status));
    goto failed;
  } else if (status > MFX_ERR_NONE) {
    GST_WARNING_OBJECT (thiz, "Query IO surfaces returned: %s",
        msdk_status_to_string (status));
  }

  if (request.NumFrameSuggested < thiz->param.AsyncDepth) {
    GST_ERROR_OBJECT (thiz, "Required %d surfaces (%d suggested), async %d",
        request.NumFrameMin, request.NumFrameSuggested,
        thiz->param.AsyncDepth);
    goto failed;
  }

  /* account for the downstream requirement */
  if (G_LIKELY (thiz->min_prealloc_buffers))
    request.NumFrameSuggested += thiz->min_prealloc_buffers;
  else
    GST_WARNING_OBJECT (thiz,
        "Allocating resources without considering the downstream requirement "
        "or extra scratch surface count");

  if (thiz->use_video_memory) {
    gint shared_async_depth;

    shared_async_depth =
        gst_msdk_context_get_shared_async_depth (thiz->context);
    request.NumFrameSuggested += shared_async_depth;

    request.Type |= MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
    if (thiz->use_dmabuf)
      request.Type |= MFX_MEMTYPE_EXPORT_FRAME;
    gst_msdk_frame_alloc (thiz->context, &request, &thiz->alloc_resp);
  }

  /* update the prealloc_buffer count, which will be used later
   * as the GstBufferPool min_buffers */
  thiz->min_prealloc_buffers = request.NumFrameSuggested;

  GST_DEBUG_OBJECT (thiz, "Required %d surfaces (%d suggested)",
      request.NumFrameMin, request.NumFrameSuggested);

  status = MFXVideoDECODE_Init (session, &thiz->param);
  if (status < MFX_ERR_NONE) {
    GST_ERROR_OBJECT (thiz, "Init failed (%s)", msdk_status_to_string (status));
    goto failed;
  } else if (status > MFX_ERR_NONE) {
    GST_WARNING_OBJECT (thiz, "Init returned: %s",
        msdk_status_to_string (status));
  }

  status = MFXVideoDECODE_GetVideoParam (session, &thiz->param);
  if (status < MFX_ERR_NONE) {
    GST_ERROR_OBJECT (thiz, "Get Video Parameters failed (%s)",
        msdk_status_to_string (status));
    goto failed;
  } else if (status > MFX_ERR_NONE) {
    GST_WARNING_OBJECT (thiz, "Get Video Parameters returned: %s",
        msdk_status_to_string (status));
  }

  g_array_set_size (thiz->tasks, 0);    /* resets array content */
  g_array_set_size (thiz->tasks, thiz->param.AsyncDepth);
  thiz->next_task = 0;

  GST_OBJECT_UNLOCK (thiz);

  thiz->initialized = TRUE;
  return TRUE;

failed:
  GST_OBJECT_UNLOCK (thiz);
  return FALSE;
}
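/*
 * Every snippet in this section assumes an already-open mfxSession
 * (q->session, Session, m_mfxSession, or the GstMsdkContext's session). A
 * common bootstrap with the plain MSDK dispatcher, shown here as a
 * self-contained sketch:
 */
#include <mfxvideo.h>

static mfxStatus open_msdk_session(mfxSession *session)
{
    mfxVersion ver = { { 0, 1 } };  /* request API >= 1.0 (Minor 0, Major 1) */
    mfxStatus sts = MFXInit(MFX_IMPL_AUTO_ANY, &ver, session);
    if (sts != MFX_ERR_NONE)
        return sts;

    mfxIMPL impl;
    MFXQueryIMPL(*session, &impl);  /* report which implementation was chosen */
    return MFX_ERR_NONE;
}

/* Tear-down mirrors init: MFXVideoDECODE_Close(session), then MFXClose(session). */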