// Free decoder resources mfxStatus VideoDECODEH265::Close(void) { MFX_AUTO_LTRACE(MFX_TRACE_LEVEL_API, "VideoDECODEH265::Close"); UMC::AutomaticUMCMutex guard(m_mGuard); if (!m_isInit || !m_pH265VideoDecoder.get()) return MFX_ERR_NOT_INITIALIZED; m_pH265VideoDecoder->Close(); m_FrameAllocator->Close(); if (m_response.NumFrameActual) m_core->FreeFrames(&m_response); if (m_response_alien.NumFrameActual) m_core->FreeFrames(&m_response_alien); m_isOpaq = false; m_isInit = false; m_isFirstRun = true; m_frameOrder = (mfxU16)MFX_FRAMEORDER_UNKNOWN; m_va = 0; memset(&m_stat, 0, sizeof(m_stat)); return MFX_ERR_NONE; }
// Scheduler entry point adapting the obsolete (legacy) VideoENCODE::EncodeFrame
// call to the task-routine signature.
// @param pState       VideoENCODE instance (opaque to the scheduler)
// @param pParam       MFX_THREAD_TASK_PARAMETERS with the encode arguments
// @param threadNumber must be 0 — legacy encode is single-threaded
// @param callNumber   unused
// @return status of EncodeFrame, or MFX_ERR_NULL_PTR on bad arguments
static mfxStatus MFXVideoENCODELegacyRoutine(void *pState, void *pParam,
                                             mfxU32 threadNumber, mfxU32 callNumber)
{
    MFX_AUTO_LTRACE(MFX_TRACE_LEVEL_SCHED, "EncodeFrame");
    VideoENCODE *pENCODE = (VideoENCODE *) pState;
    MFX_THREAD_TASK_PARAMETERS *pTaskParam = (MFX_THREAD_TASK_PARAMETERS *) pParam;
    mfxStatus mfxRes;

    // touch unreferenced parameter(s): a (void) cast, unlike the former
    // self-assignment, does not trigger -Wself-assign / dead-store warnings
    (void) callNumber;

    // check error(s)
    if ((NULL == pState) ||
        (NULL == pParam) ||
        (0 != threadNumber))
    {
        return MFX_ERR_NULL_PTR;
    }

    // call the obsolete method
    mfxRes = pENCODE->EncodeFrame(pTaskParam->encode.ctrl,
                                  &pTaskParam->encode.internal_params,
                                  pTaskParam->encode.surface,
                                  pTaskParam->encode.bs);

    return mfxRes;

} // mfxStatus MFXVideoENCODELegacyRoutine(void *pState, void *pParam,
// Dedicated HW-event thread: waits (with a timeout) on the hardware task-done
// event and wakes up the worker threads each time it fires or the wait times out.
Ipp32u mfxSchedulerCore::scheduler_wakeup_thread_proc(void *pParam)
{
    mfxSchedulerCore * const core = (mfxSchedulerCore *) pParam;

    // Name the thread for tracing purposes.
    {
        char threadName[30] = {0};
        my_snprintf(threadName, sizeof(threadName) - 1, "ThreadName=MSDKHWL#%d", 0);
        MFX_AUTO_LTRACE(MFX_TRACE_LEVEL_SCHED, threadName);
    }

    // Spin until the scheduler asks this thread to quit.
    while (!core->m_bQuitWakeUpThread)
    {
        const vm_status waitRes = vm_event_timed_wait(&core->m_hwTaskDone,
                                                      core->m_timer_hw_event);

        // Either the HW event fired or the timeout elapsed: reset the event
        // and release every thread waiting on a HW buffer.
        if ((VM_OK == waitRes) || (VM_TIMEOUT == waitRes))
        {
            vm_event_reset(&core->m_hwTaskDone);

            core->IncrementHWEventCounter();
            core->WakeUpThreads((mfxU32) MFX_INVALID_THREAD_ID,
                                MFX_SCHEDULER_HW_BUFFER_COMPLETED);
        }
    }

    return 0x0ccedff;

} // Ipp32u mfxSchedulerCore::scheduler_wakeup_thread_proc(void *pParam)
// Decoder threads entry point static mfxStatus __CDECL HEVCDECODERoutine(void *pState, void *pParam, mfxU32 threadNumber, mfxU32 ) { MFX_AUTO_LTRACE(MFX_TRACE_LEVEL_API, "HEVCDECODERoutine"); VideoDECODEH265 *decoder = (VideoDECODEH265 *)pState; mfxStatus sts = decoder->RunThread(pParam, threadNumber); return sts; }
// Decoder instance threads entry point. Do async tasks here mfxStatus VideoDECODEH265::RunThread(void * params, mfxU32 threadNumber) { MFX_AUTO_LTRACE(MFX_TRACE_LEVEL_API, "VideoDECODEH265::RunThread"); ThreadTaskInfo * info = (ThreadTaskInfo *)params; mfxStatus sts = MFX_TASK_WORKING; bool isDecoded; { UMC::AutomaticUMCMutex guard(m_mGuardRunThread); if (!info->surface_work) return MFX_TASK_DONE; isDecoded = m_pH265VideoDecoder->CheckDecoding(true, info->pFrame); } if (!isDecoded) { sts = m_pH265VideoDecoder->RunThread(threadNumber); } { UMC::AutomaticUMCMutex guard(m_mGuardRunThread); if (!info->surface_work) return MFX_TASK_DONE; isDecoded = m_pH265VideoDecoder->CheckDecoding(true, info->pFrame); if (isDecoded) { info->surface_work = 0; } } if (isDecoded) { if (!info->pFrame->wasDisplayed() && info->surface_out) { mfxStatus status = DecodeFrame(info->surface_out, info->pFrame); if (status != MFX_ERR_NONE && status != MFX_ERR_NOT_FOUND) return status; } return MFX_TASK_DONE; } return sts; }
// Actually calculate needed frames number mfxStatus VideoDECODEH265::QueryIOSurfInternal(eMFXPlatform platform, eMFXHWType type, mfxVideoParam *par, mfxFrameAllocRequest *request) { MFX_AUTO_LTRACE(MFX_TRACE_LEVEL_HOTSPOTS, "VideoDECODEH265::QueryIOSurfInternal"); request->Info = par->mfx.FrameInfo; mfxU32 asyncDepth = CalculateAsyncDepth(platform, par); bool useDelayedDisplay = (ENABLE_DELAYED_DISPLAY_MODE != 0) && IsNeedToUseHWBuffering(type) && (asyncDepth != 1); mfxExtHEVCParam * hevcParam = (mfxExtHEVCParam *)GetExtendedBuffer(par->ExtParam, par->NumExtParam, MFX_EXTBUFF_HEVC_PARAM); if (hevcParam && (!hevcParam->PicWidthInLumaSamples || !hevcParam->PicHeightInLumaSamples)) // not initialized hevcParam = 0; mfxI32 dpbSize = 0; uint32_t level_idc = par->mfx.CodecLevel; if (hevcParam) dpbSize = CalculateDPBSize(level_idc, hevcParam->PicWidthInLumaSamples, hevcParam->PicHeightInLumaSamples, 0); else dpbSize = CalculateDPBSize(level_idc, par->mfx.FrameInfo.Width, par->mfx.FrameInfo.Height, 0) + 1; //1 extra for avoid aligned size issue if (par->mfx.MaxDecFrameBuffering && par->mfx.MaxDecFrameBuffering < dpbSize) dpbSize = par->mfx.MaxDecFrameBuffering; mfxU32 numMin = dpbSize + 1 + asyncDepth; if (platform != MFX_PLATFORM_SOFTWARE && useDelayedDisplay) // equals if (m_useDelayedDisplay) numMin += NUMBER_OF_ADDITIONAL_FRAMES; request->NumFrameMin = (mfxU16)numMin; request->NumFrameSuggested = request->NumFrameMin; if (MFX_PLATFORM_SOFTWARE == platform) { request->Type = MFX_MEMTYPE_SYSTEM_MEMORY | MFX_MEMTYPE_FROM_DECODE; } else { request->Type = MFX_MEMTYPE_DXVA2_DECODER_TARGET | MFX_MEMTYPE_FROM_DECODE; } return MFX_ERR_NONE; }
// Check if there is enough data to start decoding in async mode
// Validates the inputs, feeds the bitstream to the UMC decoder and maps the
// UMC status onto the MFX status the application expects. On success,
// *surface_out points to a frame ready for display (to be synchronized by an
// async task); otherwise it is left 0 and the returned status tells the
// caller whether to supply more data, more surfaces, or retry.
mfxStatus VideoDECODEH265::DecodeFrameCheck(mfxBitstream *bs, mfxFrameSurface1 *surface_work, mfxFrameSurface1 **surface_out)
{
    MFX_AUTO_LTRACE(MFX_TRACE_LEVEL_API, "VideoDECODEH265::DecodeFrameCheck");

    if (!m_isInit)
        return MFX_ERR_NOT_INITIALIZED;

    MFX_CHECK_NULL_PTR2(surface_work, surface_out);

    mfxStatus sts = MFX_ERR_NONE;
    // bs == 0 means "drain mode": no new data, output what is buffered.
    sts = bs ? CheckBitstream(bs) : MFX_ERR_NONE;

    if (sts != MFX_ERR_NONE)
        return sts;

    UMC::Status umcRes = UMC::UMC_OK;

    *surface_out = 0;

    if (m_isOpaq)
    {
        sts = CheckFrameInfoCodecs(&surface_work->Info, MFX_CODEC_HEVC, m_platform != MFX_PLATFORM_SOFTWARE);
        if (sts != MFX_ERR_NONE)
            return MFX_ERR_UNSUPPORTED;

        // An opaque surface must not carry real memory; any set pointer/MemId
        // means the app passed a non-opaque surface by mistake.
        if (surface_work->Data.MemId || surface_work->Data.Y || surface_work->Data.R || surface_work->Data.A || surface_work->Data.UV) // opaq surface
            return MFX_ERR_UNDEFINED_BEHAVIOR;

        // Work on the real surface backing the opaque one from here on.
        surface_work = GetOriginalSurface(surface_work);
        if (!surface_work)
            return MFX_ERR_UNDEFINED_BEHAVIOR;
    }

    sts = CheckFrameInfoCodecs(&surface_work->Info, MFX_CODEC_HEVC, m_platform != MFX_PLATFORM_SOFTWARE);
    if (sts != MFX_ERR_NONE)
        return MFX_ERR_INVALID_VIDEO_PARAM;

    sts = CheckFrameData(surface_work);
    if (sts != MFX_ERR_NONE)
        return sts;

    sts = m_FrameAllocator->SetCurrentMFXSurface(surface_work, m_isOpaq);
    if (sts != MFX_ERR_NONE)
        return sts;

#ifdef MFX_MAX_DECODE_FRAMES
    if (m_stat.NumFrame >= MFX_MAX_DECODE_FRAMES)
        return MFX_ERR_UNDEFINED_BEHAVIOR;
#endif

    sts = MFX_ERR_UNDEFINED_BEHAVIOR;

    try
    {
        bool force = false;

        UMC::Status umcFrameRes = UMC::UMC_OK;
        // NOTE(review): umcAddSourceRes is assigned below but never read;
        // kept as-is (comment-only change).
        UMC::Status umcAddSourceRes = UMC::UMC_OK;

        MFXMediaDataAdapter src(bs);

        for (;;)
        {
            // With no free surface, skip AddSource and force output instead.
            if (m_FrameAllocator->FindFreeSurface() == -1)
            {
                umcRes = UMC::UMC_ERR_NEED_FORCE_OUTPUT;
            }
            else
            {
                umcRes = m_pH265VideoDecoder->AddSource(bs ? &src : 0);
            }

            umcAddSourceRes = umcFrameRes = umcRes;

            // Map UMC status codes onto the MFX status to return.
            // The order of the checks below is significant.
            if (umcRes == UMC::UMC_NTF_NEW_RESOLUTION ||
                umcRes == UMC::UMC_WRN_REPOSITION_INPROGRESS ||
                umcRes == UMC::UMC_ERR_UNSUPPORTED)
            {
                // Stream parameters changed; refresh m_vPar from the decoder.
                FillVideoParam(&m_vPar, true);
            }

            if (umcRes == UMC::UMC_WRN_REPOSITION_INPROGRESS)
            {
                if (!m_isFirstRun)
                {
                    sts = MFX_WRN_VIDEO_PARAM_CHANGED;
                }
                else
                {
                    // First run: a reposition is expected, not a warning.
                    umcAddSourceRes = umcFrameRes = umcRes = UMC::UMC_OK;
                    m_isFirstRun = false;
                }
            }

            // Invalid-stream errors are absorbed; decoding continues.
            if (umcRes == UMC::UMC_ERR_INVALID_STREAM)
            {
                umcAddSourceRes = umcFrameRes = umcRes = UMC::UMC_OK;
            }

            if (umcRes == UMC::UMC_NTF_NEW_RESOLUTION)
            {
                sts = MFX_ERR_INCOMPATIBLE_VIDEO_PARAM;
            }

            // AddSource succeeded but consumed the last free surface.
            if (umcRes == UMC::UMC_OK && m_FrameAllocator->FindFreeSurface() == -1)
            {
                sts = MFX_ERR_MORE_SURFACE;
                umcFrameRes = UMC::UMC_ERR_NOT_ENOUGH_BUFFER;
            }

            if (umcRes == UMC::UMC_ERR_NOT_ENOUGH_BUFFER ||
                umcRes == UMC::UMC_WRN_INFO_NOT_READY ||
                umcRes == UMC::UMC_ERR_NEED_FORCE_OUTPUT)
            {
                force = (umcRes == UMC::UMC_ERR_NEED_FORCE_OUTPUT);
                sts = umcRes == UMC::UMC_ERR_NOT_ENOUGH_BUFFER ? (mfxStatus)MFX_ERR_MORE_DATA_SUBMIT_TASK: MFX_WRN_DEVICE_BUSY;
            }

            if (umcRes == UMC::UMC_ERR_NOT_ENOUGH_DATA || umcRes == UMC::UMC_ERR_SYNC)
            {
                // End of stream (or drain mode): force out buffered frames.
                if ((!bs) || (bs->DataFlag == MFX_BITSTREAM_EOS))
                    force = true;
                sts = MFX_ERR_MORE_DATA;
            }

#if defined (MFX_VA_LINUX)
            if (umcRes == UMC::UMC_ERR_DEVICE_FAILED)
            {
                sts = MFX_ERR_DEVICE_FAILED;
            }
            if (umcRes == UMC::UMC_ERR_GPU_HANG)
            {
                sts = MFX_ERR_GPU_HANG;
            }
#endif

            {
                // Write back consumed-data offsets into the caller's bitstream.
                src.Save(bs);
            }

            if (sts == MFX_ERR_INCOMPATIBLE_VIDEO_PARAM)
                return sts;

            //return these errors immediatelly unless we have [input == 0]
            if (sts == MFX_ERR_DEVICE_FAILED || sts == MFX_ERR_GPU_HANG)
            {
                if (!bs)
                    force = true;
                else
                    return sts;
            }

            umcRes = m_pH265VideoDecoder->RunDecoding();

            // In decoded-order mode every completed frame is output at once.
            if (m_vInitPar.mfx.DecodedOrder)
                force = true;

            H265DecoderFrame *pFrame = GetFrameToDisplay_H265(force);

            // return frame to display
            if (pFrame)
            {
                FillOutputSurface(surface_out, surface_work, pFrame);

                m_frameOrder = (mfxU16)pFrame->m_frameOrder;
                (*surface_out)->Data.FrameOrder = m_frameOrder;
                return MFX_ERR_NONE;
            }

            *surface_out = 0;

            // Only loop again while the last AddSource iteration was clean.
            if (umcFrameRes != UMC::UMC_OK)
                break;
        } // for (;;)
    }
    catch(const h265_exception & ex)
    {
        FillVideoParam(&m_vPar, false);

        if (ex.GetStatus() == UMC::UMC_ERR_ALLOC)
        {
            // check incompatibility of video params
            if (m_vInitPar.mfx.FrameInfo.Width != m_vPar.mfx.FrameInfo.Width ||
                m_vInitPar.mfx.FrameInfo.Height != m_vPar.mfx.FrameInfo.Height)
            {
                return MFX_ERR_INCOMPATIBLE_VIDEO_PARAM;
            }
        }

        return ConvertUMCStatusToMfx(ex.GetStatus());
    }
    catch(const std::bad_alloc &)
    {
        return MFX_ERR_MEMORY_ALLOC;
    }
    catch(...)
    {
        return MFX_ERR_UNKNOWN;
    }

    return sts;
}
// MediaSDK DECODE_QueryIOSurf API function
// Validates the IOPattern, computes the surface request via
// QueryIOSurfInternal, and adjusts count/type when the decoder manages
// surfaces internally or opaque memory is requested.
mfxStatus VideoDECODEH265::QueryIOSurf(VideoCORE *core, mfxVideoParam *par, mfxFrameAllocRequest *request)
{
    MFX_AUTO_LTRACE(MFX_TRACE_LEVEL_API, "VideoDECODEH265::QueryIOSurf");
    MFX_CHECK_NULL_PTR2(par, request);

    eMFXPlatform platform = MFX_Utility::GetPlatform_H265(core, par);
    eMFXHWType hwType = (platform == MFX_PLATFORM_HARDWARE) ? core->GetHWType() : MFX_HW_UNKNOWN;

    // Work on a copy; IsNeedChangeVideoParam may adjust it.
    mfxVideoParam localPar = *par;
    bool paramsWereAdjusted = IsNeedChangeVideoParam(&localPar);

    // Exactly one of the three output memory patterns must be selected.
    int patternCount = 0;
    if (par->IOPattern & MFX_IOPATTERN_OUT_VIDEO_MEMORY)
        patternCount++;
    if (par->IOPattern & MFX_IOPATTERN_OUT_SYSTEM_MEMORY)
        patternCount++;
    if (par->IOPattern & MFX_IOPATTERN_OUT_OPAQUE_MEMORY)
        patternCount++;
    if (patternCount != 1)
        return MFX_ERR_INVALID_VIDEO_PARAM;

    // The decoder allocates internally when the requested output memory does
    // not match the platform's native memory type.
    int32_t decoderManagesSurfaces = (MFX_PLATFORM_SOFTWARE == platform)
        ? (localPar.IOPattern & MFX_IOPATTERN_OUT_VIDEO_MEMORY)
        : (localPar.IOPattern & MFX_IOPATTERN_OUT_SYSTEM_MEMORY);

    mfxStatus sts = QueryIOSurfInternal(platform, hwType, &localPar, request);
    if (MFX_ERR_NONE != sts)
        return sts;

    if (decoderManagesSurfaces)
    {
        // The app only needs surfaces for the async pipeline depth; the
        // decoder keeps its own pool of the native type.
        request->NumFrameSuggested = request->NumFrameMin = (mfxU16)CalculateAsyncDepth(platform, par);
        request->Type = (MFX_PLATFORM_SOFTWARE == platform)
            ? (MFX_MEMTYPE_DXVA2_DECODER_TARGET | MFX_MEMTYPE_FROM_DECODE)
            : (MFX_MEMTYPE_SYSTEM_MEMORY | MFX_MEMTYPE_FROM_DECODE);
    }

    request->Type |= (par->IOPattern & MFX_IOPATTERN_OUT_OPAQUE_MEMORY)
        ? MFX_MEMTYPE_OPAQUE_FRAME
        : MFX_MEMTYPE_EXTERNAL_FRAME;

    if (platform != core->GetPlatformType())
    {
        VM_ASSERT(platform == MFX_PLATFORM_SOFTWARE);
        return MFX_ERR_UNSUPPORTED;
    }

    return paramsWereAdjusted ? MFX_WRN_INCOMPATIBLE_VIDEO_PARAM : MFX_ERR_NONE;
}
// Initialize decoder instance
// Validates the video parameters, creates the task supplier and frame
// allocator for the detected platform, allocates external and/or internal
// surfaces, creates the VA context, and initializes the UMC decoder.
// Must not be called twice without an intervening Close().
// Returns MFX_WRN_INCOMPATIBLE_VIDEO_PARAM when parameters were adjusted.
mfxStatus VideoDECODEH265::Init(mfxVideoParam *par)
{
    MFX_AUTO_LTRACE(MFX_TRACE_LEVEL_API, "VideoDECODEH265::Init");

    UMC::AutomaticUMCMutex guard(m_mGuard);

    // Double Init is an application error.
    if (m_isInit)
        return MFX_ERR_UNDEFINED_BEHAVIOR;

    MFX_CHECK_NULL_PTR1(par);

    m_platform = MFX_Utility::GetPlatform_H265(m_core, par);

    eMFXHWType type = MFX_HW_UNKNOWN;
    if (m_platform == MFX_PLATFORM_HARDWARE)
    {
        type = m_core->GetHWType();
    }

    if (CheckVideoParamDecoders(par, m_core->IsExternalFrameAllocator(), type) < MFX_ERR_NONE)
        return MFX_ERR_INVALID_VIDEO_PARAM;

    if (!MFX_Utility::CheckVideoParam_H265(par, type))
        return MFX_ERR_INVALID_VIDEO_PARAM;

    // Keep three copies of the parameters: the exact app input (m_vInitPar),
    // the possibly-adjusted first-run set (m_vFirstPar), and the working set
    // (m_vPar) that tracks the stream.
    m_vInitPar = *par;
    m_vFirstPar = *par;
    m_vFirstPar.mfx.NumThread = 0;

    bool isNeedChangeVideoParamWarning = IsNeedChangeVideoParam(&m_vFirstPar);
    m_vPar = m_vFirstPar;
    m_vPar.CreateExtendedBuffer(MFX_EXTBUFF_VIDEO_SIGNAL_INFO);
    m_vPar.CreateExtendedBuffer(MFX_EXTBUFF_CODING_OPTION_SPSPPS);
    m_vPar.CreateExtendedBuffer(MFX_EXTBUFF_HEVC_PARAM);

    mfxU32 asyncDepth = CalculateAsyncDepth(m_platform, par);
    m_vPar.mfx.NumThread = (mfxU16)CalculateNumThread(par, m_platform);

    if (MFX_PLATFORM_SOFTWARE == m_platform)
    {
        // SW HEVC decode is not supported by this build.
        return MFX_ERR_UNSUPPORTED;
    }
    else
    {
        m_useDelayedDisplay = ENABLE_DELAYED_DISPLAY_MODE != 0 && IsNeedToUseHWBuffering(m_core->GetHWType()) && (asyncDepth != 1);

        // Workaround: some platforms need an enlarged surface pool.
        bool useBigSurfacePoolWA = MFX_Utility::IsBugSurfacePoolApplicable(type, par);

        m_pH265VideoDecoder.reset(useBigSurfacePoolWA ? new VATaskSupplierBigSurfacePool<VATaskSupplier>() : new VATaskSupplier()); // HW
        m_FrameAllocator.reset(new mfx_UMC_FrameAllocator_D3D());
    }

    // Internal allocation is needed when the requested output memory type
    // does not match the platform's native memory type.
    int32_t useInternal = (MFX_PLATFORM_SOFTWARE == m_platform) ?
        (m_vPar.IOPattern & MFX_IOPATTERN_OUT_VIDEO_MEMORY) : (m_vPar.IOPattern & MFX_IOPATTERN_OUT_SYSTEM_MEMORY);

    if (m_vPar.IOPattern & MFX_IOPATTERN_OUT_OPAQUE_MEMORY)
    {
        mfxExtOpaqueSurfaceAlloc *pOpaqAlloc = (mfxExtOpaqueSurfaceAlloc *)GetExtendedBuffer(par->ExtParam, par->NumExtParam, MFX_EXTBUFF_OPAQUE_SURFACE_ALLOCATION);

        if (!pOpaqAlloc)
            return MFX_ERR_INVALID_VIDEO_PARAM;

        // For opaque memory the decision depends on the underlying type the
        // app chose for the opaque surfaces.
        useInternal = (m_platform == MFX_PLATFORM_SOFTWARE) ? !(pOpaqAlloc->Out.Type & MFX_MEMTYPE_SYSTEM_MEMORY) : (pOpaqAlloc->Out.Type & MFX_MEMTYPE_SYSTEM_MEMORY);
    }

    // allocate memory
    mfxFrameAllocRequest request;
    mfxFrameAllocRequest request_internal;
    memset(&request, 0, sizeof(request));
    memset(&m_response, 0, sizeof(m_response));
    memset(&m_response_alien, 0, sizeof(m_response_alien));
    m_isOpaq = false;

    mfxStatus mfxSts = QueryIOSurfInternal(m_platform, type, &m_vPar, &request);
    if (mfxSts != MFX_ERR_NONE)
        return mfxSts;

    if (useInternal)
        request.Type |= MFX_MEMTYPE_INTERNAL_FRAME;
    else
        request.Type |= MFX_MEMTYPE_EXTERNAL_FRAME;

    request_internal = request;

    // allocates external surfaces:
    bool mapOpaq = true;
    mfxExtOpaqueSurfaceAlloc *pOpqAlloc = 0;
    // NOTE(review): UpdateAllocRequest presumably sets mapOpaq/pOpqAlloc for
    // the opaque case — the mapOpaq branch below relies on pOpqAlloc being
    // non-null then; confirm against its definition.
    mfxSts = UpdateAllocRequest(par, &request, pOpqAlloc, mapOpaq);
    if (mfxSts < MFX_ERR_NONE)
        return mfxSts;

    if (m_isOpaq && !m_core->IsCompatibleForOpaq())
        return MFX_ERR_UNDEFINED_BEHAVIOR;

    if (mapOpaq)
    {
        mfxSts = m_core->AllocFrames(&request, &m_response, pOpqAlloc->Out.Surfaces, pOpqAlloc->Out.NumSurface);
    }
    else
    {
        // External surfaces are registered (not allocated) only when the app
        // provides them directly to a HW decoder.
        if (m_platform != MFX_PLATFORM_SOFTWARE && !useInternal)
        {
            request.AllocId = par->AllocId;
            mfxSts = m_core->AllocFrames(&request, &m_response, false);
        }
    }

    if (mfxSts < MFX_ERR_NONE)
        return mfxSts;

    // allocates internal surfaces:
    if (useInternal)
    {
        // Remember the external response separately so Close() can free both.
        m_response_alien = m_response;
        m_FrameAllocator->SetExternalFramesResponse(&m_response_alien);
        request = request_internal;

        mfxSts = m_core->AllocFrames(&request_internal, &m_response, true);
        if (mfxSts < MFX_ERR_NONE)
            return mfxSts;
    }
    else
    {
        m_FrameAllocator->SetExternalFramesResponse(&m_response);
    }

    if (m_platform != MFX_PLATFORM_SOFTWARE)
    {
        mfxSts = m_core->CreateVA(&m_vFirstPar, &request, &m_response, m_FrameAllocator.get());
        if (mfxSts < MFX_ERR_NONE)
            return mfxSts;
    }

    UMC::Status umcSts = m_FrameAllocator->InitMfx(0, m_core, &m_vFirstPar, &request, &m_response, !useInternal, m_platform == MFX_PLATFORM_SOFTWARE);
    if (umcSts != UMC::UMC_OK)
        return MFX_ERR_MEMORY_ALLOC;

    umcSts = m_MemoryAllocator.InitMem(0, m_core);
    if (umcSts != UMC::UMC_OK)
        return MFX_ERR_MEMORY_ALLOC;

    m_pH265VideoDecoder->SetFrameAllocator(m_FrameAllocator.get());

    UMC::VideoDecoderParams umcVideoParams;
    ConvertMFXParamsToUMC(&m_vFirstPar, &umcVideoParams);
    umcVideoParams.numThreads = m_vPar.mfx.NumThread;
    // The bitrate field is repurposed here to carry the buffered-frames count.
    umcVideoParams.info.bitrate = MFX_MAX(asyncDepth - umcVideoParams.numThreads, 0); // buffered frames

    if (MFX_PLATFORM_SOFTWARE != m_platform)
    {
        m_core->GetVA((mfxHDL*)&m_va, MFX_MEMTYPE_FROM_DECODE);
        umcVideoParams.pVideoAccelerator = m_va;
        static_cast<VATaskSupplier*>(m_pH265VideoDecoder.get())->SetVideoHardwareAccelerator(m_va);
    }

    umcVideoParams.lpMemoryAllocator = &m_MemoryAllocator;

    umcSts = m_pH265VideoDecoder->Init(&umcVideoParams);
    if (umcSts != UMC::UMC_OK)
    {
        return ConvertUMCStatusToMfx(umcSts);
    }

    m_isInit = true;
    m_frameOrder = (mfxU16)MFX_FRAMEORDER_UNKNOWN;
    m_isFirstRun = true;

    if (MFX_PLATFORM_SOFTWARE != m_platform && m_useDelayedDisplay)
    {
        static_cast<VATaskSupplier*>(m_pH265VideoDecoder.get())->SetBufferedFramesNumber(NUMBER_OF_ADDITIONAL_FRAMES);
    }

    m_pH265VideoDecoder->SetVideoParams(&m_vFirstPar);

    if (m_platform != m_core->GetPlatformType())
    {
        VM_ASSERT(m_platform == MFX_PLATFORM_SOFTWARE);
        return MFX_ERR_UNSUPPORTED;
    }

    if (isNeedChangeVideoParamWarning)
    {
        return MFX_WRN_INCOMPATIBLE_VIDEO_PARAM;
    }

    return MFX_ERR_NONE;
}
// Wait until a frame is ready to be output and set necessary surface flags mfxStatus VideoDECODEH265::DecodeFrame(mfxFrameSurface1 *surface_out, H265DecoderFrame * pFrame) { MFX_AUTO_LTRACE(MFX_TRACE_LEVEL_HOTSPOTS, "VideoDECODEH265::DecodeFrame"); MFX_CHECK_NULL_PTR1(surface_out); mfxI32 index; if (pFrame) { index = pFrame->GetFrameData()->GetFrameMID(); } else { index = m_FrameAllocator->FindSurface(surface_out, m_isOpaq); pFrame = m_pH265VideoDecoder->FindSurface((UMC::FrameMemID)index); if (!pFrame) { VM_ASSERT(false); return MFX_ERR_NOT_FOUND; } } surface_out->Data.Corrupted = 0; int32_t const error = pFrame->GetError(); if (error & UMC::ERROR_FRAME_DEVICE_FAILURE) { surface_out->Data.Corrupted |= MFX_CORRUPTION_MAJOR; if (error == UMC::UMC_ERR_GPU_HANG) return MFX_ERR_GPU_HANG; else return MFX_ERR_DEVICE_FAILED; } else { if (error & UMC::ERROR_FRAME_MINOR) surface_out->Data.Corrupted |= MFX_CORRUPTION_MINOR; if (error & UMC::ERROR_FRAME_MAJOR) surface_out->Data.Corrupted |= MFX_CORRUPTION_MAJOR; if (error & UMC::ERROR_FRAME_REFERENCE_FRAME) surface_out->Data.Corrupted |= MFX_CORRUPTION_REFERENCE_FRAME; if (error & UMC::ERROR_FRAME_DPB) surface_out->Data.Corrupted |= MFX_CORRUPTION_REFERENCE_LIST; if (error & UMC::ERROR_FRAME_RECOVERY) surface_out->Data.Corrupted |= MFX_CORRUPTION_MAJOR; if (error & UMC::ERROR_FRAME_TOP_FIELD_ABSENT) surface_out->Data.Corrupted |= MFX_CORRUPTION_ABSENT_TOP_FIELD; if (error & UMC::ERROR_FRAME_BOTTOM_FIELD_ABSENT) surface_out->Data.Corrupted |= MFX_CORRUPTION_ABSENT_BOTTOM_FIELD; } mfxStatus sts = m_FrameAllocator->PrepareToOutput(surface_out, index, &m_vPar, m_isOpaq); pFrame->setWasDisplayed(); return sts; }
// Scheduler worker thread main loop: repeatedly pulls a task, executes its
// routine (shielding the scheduler from any exception it throws), records
// work/sleep time statistics, and marks the task completed. Exits when the
// scheduler sets m_bQuit; returns a thread-specific marker value.
Ipp32u mfxSchedulerCore::scheduler_thread_proc(void *pParam)
{
    MFX_SCHEDULER_THREAD_CONTEXT *pContext = (MFX_SCHEDULER_THREAD_CONTEXT *) pParam;
    mfxTaskHandle previousTaskHandle = {};
    const Ipp32u threadNum = pContext->threadNum;

    // Name the thread for tracing purposes.
    {
        char thread_name[30] = {0};
        my_snprintf(thread_name, sizeof(thread_name)-1, "ThreadName=MSDK#%d", pContext->threadNum);
        MFX_AUTO_LTRACE(MFX_TRACE_LEVEL_SCHED, thread_name);
    }

    // main working cycle for threads
    while (false == pContext->pSchedulerCore->m_bQuit)
    {
        MFX_CALL_INFO call = {};
        mfxStatus mfxRes;

        // previousTaskHandle lets the scheduler deprioritize the task this
        // thread just ran when picking the next one.
        mfxRes = pContext->pSchedulerCore->GetTask(call, previousTaskHandle, threadNum);
        if (MFX_ERR_NONE == mfxRes)
        {
            mfxU64 start, stop;

            // perform asynchronous operation
            try
            {
                const char *pRoutineName = call.pTask->entryPoint.pRoutineName;
                if (!pRoutineName)
                    pRoutineName = "MFX Async Task";
                MFX_AUTO_LTRACE(MFX_TRACE_LEVEL_SCHED, pRoutineName);
                MFX_LTRACE_1(MFX_TRACE_LEVEL_SCHED, "^Child^of", "%d", call.pTask->nParentId);

                // mark beginning of working period
                start = pContext->pSchedulerCore->GetHighPerformanceCounter();

                // NOTE: it is legacy task call,
                // it should be eliminated soon
                if (call.pTask->bObsoleteTask)
                {
                    // Obsolete tasks receive the packed obsolete_params blob
                    // instead of the entry point's own parameter.
                    call.res = call.pTask->entryPoint.pRoutine(call.pTask->entryPoint.pState,
                                                              (void *) &call.pTask->obsolete_params,
                                                              call.threadNum,
                                                              call.callNum);
                }
                // the only legal task calling process.
                // Should survive only this one :-).
                else
                {
                    call.res = call.pTask->entryPoint.pRoutine(call.pTask->entryPoint.pState,
                                                              call.pTask->entryPoint.pParam,
                                                              call.threadNum,
                                                              call.callNum);
                }

                // mark end of working period
                stop = pContext->pSchedulerCore->GetHighPerformanceCounter();

                // update thread statistic
                call.timeSpend = (stop - start);
                pContext->workTime += call.timeSpend;
                // save the previous task's handle
                previousTaskHandle = call.taskHandle;

                MFX_LTRACE_1(MFX_TRACE_LEVEL_SCHED, "mfxRes = ", "%d", call.res);
            }
            catch(...)
            {
                // A throwing task routine must not kill the worker thread;
                // report it as an unknown failure instead.
                call.res = MFX_ERR_UNKNOWN;
            }

            // mark the task completed,
            // set the sync point into the high state if any.
            pContext->pSchedulerCore->MarkTaskCompleted(&call, threadNum);
            //timer1.Stop(0);
        }
        else
        {
            mfxU64 start, stop;

#if defined(MFX_SCHEDULER_LOG)
            mfxLogWriteA(pContext->pSchedulerCore->m_hLog,
                         "[% 4u] thread's sleeping\n", threadNum);
#endif // defined(MFX_SCHEDULER_LOG)

            // mark beginning of sleep period
            start = pContext->pSchedulerCore->GetHighPerformanceCounter();

            // there is no any task.
            // sleep for a while until the event is signaled.
            pContext->pSchedulerCore->Wait(threadNum);

            // mark end of sleep period
            stop = pContext->pSchedulerCore->GetHighPerformanceCounter();

            // update thread statistic
            pContext->sleepTime += (stop - start);

#if defined(MFX_SCHEDULER_LOG)
            mfxLogWriteA(pContext->pSchedulerCore->m_hLog,
                         "[% 4u] thread woke up\n", threadNum);
#endif // defined(MFX_SCHEDULER_LOG)
        }
    }

    return (0x0cced00 + pContext->threadNum);

} // Ipp32u mfxSchedulerCore::scheduler_thread_proc(void *pParam)