mfxStatus CQuickSyncDecoder::InitFrameAllocator(mfxVideoParam* pVideoParams, mfxU32 nPitch) { MSDK_TRACE("QsDecoder: InitFrameAllocator\n"); // Already initialized if (m_pFrameSurfaces) { return MFX_ERR_NONE; } MSDK_CHECK_POINTER(m_pmfxDEC, MFX_ERR_NOT_INITIALIZED); mfxStatus sts = MFX_ERR_NONE; // Initialize frame allocator (if needed) sts = CreateAllocator(); MSDK_CHECK_NOT_EQUAL(sts, MFX_ERR_NONE, sts); // Find how many surfaces are needed mfxFrameAllocRequest allocRequest; MSDK_ZERO_VAR(allocRequest); sts = m_pmfxDEC->QueryIOSurf(pVideoParams, &allocRequest); MSDK_IGNORE_MFX_STS(sts, MFX_WRN_PARTIAL_ACCELERATION); MSDK_IGNORE_MFX_STS(sts, MFX_WRN_INCOMPATIBLE_VIDEO_PARAM); MSDK_CHECK_RESULT_P_RET(sts, MFX_ERR_NONE); allocRequest.NumFrameSuggested = (mfxU16)m_nAuxFrameCount + allocRequest.NumFrameSuggested; allocRequest.NumFrameMin = allocRequest.NumFrameSuggested; // Decide memory type allocRequest.Type = MFX_MEMTYPE_EXTERNAL_FRAME | MFX_MEMTYPE_FROM_DECODE; allocRequest.Type |= (m_bUseD3DAlloc) ? MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET : MFX_MEMTYPE_SYSTEM_MEMORY; memcpy(&allocRequest.Info, &pVideoParams->mfx.FrameInfo, sizeof(mfxFrameInfo)); // Allocate frames with H aligned at 32 for both progressive and interlaced content allocRequest.Info.Height = MSDK_ALIGN32(allocRequest.Info.Height); allocRequest.Info.Width = (mfxU16)nPitch; // Perform allocation call. result is saved in m_AllocResponse sts = m_pFrameAllocator->Alloc(m_pFrameAllocator->pthis, &allocRequest, &m_AllocResponse); MSDK_CHECK_RESULT_P_RET(sts, MFX_ERR_NONE); m_nRequiredFramesNum = m_AllocResponse.NumFrameActual; ASSERT(m_nRequiredFramesNum == allocRequest.NumFrameSuggested); m_pFrameSurfaces = new mfxFrameSurface1[m_nRequiredFramesNum]; MSDK_CHECK_POINTER(m_pFrameSurfaces, MFX_ERR_MEMORY_ALLOC); MSDK_ZERO_MEMORY(m_pFrameSurfaces, sizeof(mfxFrameSurface1) * m_nRequiredFramesNum); // Allocate decoder work & output surfaces for (mfxU32 i = 0; i < m_nRequiredFramesNum; ++i) { // Copy frame info memcpy(&(m_pFrameSurfaces[i].Info), &pVideoParams->mfx.FrameInfo, sizeof(mfxFrameInfo)); // Save pointer to allocator specific surface object (mid) m_pFrameSurfaces[i].Data.MemId = m_AllocResponse.mids[i]; m_pFrameSurfaces[i].Data.Pitch = (mfxU16)nPitch; } return sts; }
mfxStatus CD3D11Device::CreateVideoProcessor(mfxFrameSurface1 * pSrf) { HRESULT hres = S_OK; if (!!m_VideoProcessorEnum || NULL == pSrf) return MFX_ERR_NONE; //create video processor D3D11_VIDEO_PROCESSOR_CONTENT_DESC ContentDesc; MSDK_ZERO_MEMORY( ContentDesc ); ContentDesc.InputFrameFormat = D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE; ContentDesc.InputFrameRate.Numerator = 30000; ContentDesc.InputFrameRate.Denominator = 1000; ContentDesc.InputWidth = pSrf->Info.CropW; ContentDesc.InputHeight = pSrf->Info.CropH; ContentDesc.OutputWidth = pSrf->Info.CropW; ContentDesc.OutputHeight = pSrf->Info.CropH; ContentDesc.OutputFrameRate.Numerator = 30000; ContentDesc.OutputFrameRate.Denominator = 1000; ContentDesc.Usage = D3D11_VIDEO_USAGE_PLAYBACK_NORMAL; hres = m_pDX11VideoDevice->CreateVideoProcessorEnumerator( &ContentDesc, m_VideoProcessorEnum.Assign() ); if (FAILED(hres)) return MFX_ERR_DEVICE_FAILED; hres = m_pDX11VideoDevice->CreateVideoProcessor( m_VideoProcessorEnum, 0, m_pVideoProcessor.Assign() ); if (FAILED(hres)) return MFX_ERR_DEVICE_FAILED; return MFX_ERR_NONE; }
mfxStatus CQuickSyncDecoder::InitSession(mfxIMPL impl) { if (m_mfxVideoSession != NULL) return MFX_ERR_NONE; m_mfxVideoSession = new MFXVideoSession; mfxStatus sts = m_mfxVideoSession->Init(impl, &m_ApiVersion); if (MSDK_FAILED(sts)) { MSDK_TRACE("QsDecoder: failed to initialize MSDK session!\n"); return sts; } m_mfxVideoSession->QueryIMPL(&m_mfxImpl); m_mfxVideoSession->QueryVersion(&m_ApiVersion); m_bHwAcceleration = m_mfxImpl != MFX_IMPL_SOFTWARE; m_bUseD3DAlloc = m_bHwAcceleration; m_bUseD3D11Alloc = m_bUseD3DAlloc && ((m_mfxImpl & MFX_IMPL_VIA_D3D11) == MFX_IMPL_VIA_D3D11); m_pmfxDEC = new MFXVideoDECODE((mfxSession)*m_mfxVideoSession); #if MFX_D3D11_SUPPORT if (m_bUseD3D11Alloc) { int nAdapterID = GetMSDKAdapterNumber(*m_mfxVideoSession); if (NULL == m_HwDevice) { m_HwDevice = new CD3D11Device(); if (MSDK_FAILED(sts = m_HwDevice->Init(nAdapterID))) { MSDK_TRACE("QsDecoder: D3D11 init have failed!\n"); MSDK_SAFE_DELETE(m_HwDevice); return sts; } } mfxHDL h = m_HwDevice->GetHandle(MFX_HANDLE_D3D11_DEVICE); sts = m_mfxVideoSession->SetHandle(MFX_HANDLE_D3D11_DEVICE, h); } #endif MSDK_ZERO_MEMORY((void*)&m_LockedSurfaces, sizeof(m_LockedSurfaces)); return MFX_ERR_NONE; }
CRendererPipeline::CRendererPipeline(void) { m_pMFXAllocator = NULL; m_pmfxAllocatorParams = NULL; m_memType = SYSTEM_MEMORY; m_hParentWnd = NULL; m_nWidth = 0; m_nWidth = 0; m_nY = m_nWidth*m_nWidth; m_nUV = (m_nY / 4); m_bUsedNV12 = false; m_pEncSurfaces = NULL; MSDK_ZERO_MEMORY(m_EncResponse); #if D3D_SURFACES_SUPPORT m_hwdev = NULL; #endif }
CD3D9Device::CD3D9Device() { m_pD3D9 = NULL; m_pD3DD9 = NULL; m_pDeviceManager9 = NULL; MSDK_ZERO_MEMORY(m_D3DPP); m_resetToken = 0; m_nViews = 0; m_pS3DControl = NULL; MSDK_ZERO_MEMORY(m_backBufferDesc); m_pDXVAVPS = NULL; m_pDXVAVP_Left = NULL; m_pDXVAVP_Right = NULL; MSDK_ZERO_MEMORY(m_targetRect); MSDK_ZERO_MEMORY(m_VideoDesc); MSDK_ZERO_MEMORY(m_BltParams); MSDK_ZERO_MEMORY(m_Sample); // Initialize DXVA structures DXVA2_AYUVSample16 color = { 0x8000, // Cr 0x8000, // Cb 0x1000, // Y 0xffff // Alpha }; DXVA2_ExtendedFormat format = { // DestFormat DXVA2_SampleProgressiveFrame, // SampleFormat DXVA2_VideoChromaSubsampling_MPEG2, // VideoChromaSubsampling DXVA_NominalRange_0_255, // NominalRange DXVA2_VideoTransferMatrix_BT709, // VideoTransferMatrix DXVA2_VideoLighting_bright, // VideoLighting DXVA2_VideoPrimaries_BT709, // VideoPrimaries DXVA2_VideoTransFunc_709 // VideoTransferFunction }; // init m_VideoDesc structure MSDK_MEMCPY_VAR(m_VideoDesc.SampleFormat, &format, sizeof(DXVA2_ExtendedFormat)); m_VideoDesc.SampleWidth = 0; m_VideoDesc.SampleHeight = 0; m_VideoDesc.InputSampleFreq.Numerator = 60; m_VideoDesc.InputSampleFreq.Denominator = 1; m_VideoDesc.OutputFrameFreq.Numerator = 60; m_VideoDesc.OutputFrameFreq.Denominator = 1; // init m_BltParams structure MSDK_MEMCPY_VAR(m_BltParams.DestFormat, &format, sizeof(DXVA2_ExtendedFormat)); MSDK_MEMCPY_VAR(m_BltParams.BackgroundColor, &color, sizeof(DXVA2_AYUVSample16)); // init m_Sample structure m_Sample.Start = 0; m_Sample.End = 1; m_Sample.SampleFormat = format; m_Sample.PlanarAlpha.Fraction = 0; m_Sample.PlanarAlpha.Value = 1; m_bIsA2rgb10 = FALSE; }
mfxStatus CRendererPipeline::AllocFrames() { mfxStatus sts = MFX_ERR_NONE; mfxFrameAllocRequest EncRequest; mfxU16 nEncSurfNum = 0; // number of surfaces for encoder MSDK_ZERO_MEMORY(EncRequest); MFXVideoENCODE*pmfxENC = new MFXVideoENCODE(m_mfxSession); mfxVideoParam mfxEncParams; MSDK_ZERO_MEMORY(mfxEncParams); mfxEncParams.mfx.CodecId = MFX_CODEC_AVC; mfxEncParams.mfx.TargetUsage = MFX_TARGETUSAGE_BALANCED; mfxEncParams.mfx.TargetKbps = 1024; // in Kbps mfxEncParams.mfx.RateControlMethod = MFX_RATECONTROL_CBR; mfxEncParams.mfx.NumSlice = 0; ConvertFrameRate(60, &mfxEncParams.mfx.FrameInfo.FrameRateExtN, &mfxEncParams.mfx.FrameInfo.FrameRateExtD); mfxEncParams.mfx.EncodedOrder = 0; // binary flag, 0 signals encoder to take frames in display order mfxEncParams.IOPattern = MFX_IOPATTERN_IN_VIDEO_MEMORY; // frame info parameters mfxEncParams.mfx.FrameInfo.FourCC = (m_bUsedNV12 ? MFX_FOURCC_NV12 : MFX_FOURCC_YV12); mfxEncParams.mfx.FrameInfo.ChromaFormat = MFX_CHROMAFORMAT_YUV420; mfxEncParams.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_PROGRESSIVE; // set frame size and crops // width must be a multiple of 16 // height must be a multiple of 16 in case of frame picture and a multiple of 32 in case of field picture mfxEncParams.mfx.FrameInfo.Width = (m_nWidth); mfxEncParams.mfx.FrameInfo.Height = (m_nHeight); mfxEncParams.mfx.FrameInfo.CropX = 0; mfxEncParams.mfx.FrameInfo.CropY = 0; mfxEncParams.mfx.FrameInfo.CropW = (m_nWidth); mfxEncParams.mfx.FrameInfo.CropH = (m_nHeight); //mfxEncParams.mfx.GopRefDist = 1; //mfxEncParams.mfx.GopPicSize = m_mfxEncParams.mfx.GopPicSize; mfxEncParams.AsyncDepth = 1; sts = pmfxENC->QueryIOSurf(&mfxEncParams, &EncRequest); MSDK_SAFE_DELETE(pmfxENC); // Calculate the number of surfaces for components. // QueryIOSurf functions tell how many surfaces are required to produce at least 1 output. // To achieve better performance we provide extra surfaces. // 1 extra surface at input allows to get 1 extra output. if (EncRequest.NumFrameSuggested < mfxEncParams.AsyncDepth) return MFX_ERR_MEMORY_ALLOC; // The number of surfaces shared by vpp output and encode input. nEncSurfNum = EncRequest.NumFrameSuggested; // prepare allocation requests EncRequest.NumFrameSuggested = EncRequest.NumFrameMin = nEncSurfNum; MSDK_MEMCPY_VAR(EncRequest.Info, &(mfxEncParams.mfx.FrameInfo), sizeof(mfxFrameInfo)); // alloc frames for encoder sts = m_pMFXAllocator->Alloc(m_pMFXAllocator->pthis, &EncRequest, &m_EncResponse); MSDK_CHECK_RESULT(sts, MFX_ERR_NONE, sts); // prepare mfxFrameSurface1 array for encoder m_pEncSurfaces = new mfxFrameSurface1[m_EncResponse.NumFrameActual]; MSDK_CHECK_POINTER(m_pEncSurfaces, MFX_ERR_MEMORY_ALLOC); for (int i = 0; i < m_EncResponse.NumFrameActual; i++) { memset(&(m_pEncSurfaces[i]), 0, sizeof(mfxFrameSurface1)); MSDK_MEMCPY_VAR(m_pEncSurfaces[i].Info, &(mfxEncParams.mfx.FrameInfo), sizeof(mfxFrameInfo)); m_pEncSurfaces[i].Data.MemId = m_EncResponse.mids[i]; } return MFX_ERR_NONE; }
VAStatus CLibVA::AcquireVASurface( void** pctx, VADisplay dpy1, VASurfaceID srf1, VADisplay dpy2, VASurfaceID* srf2) { if (!pctx || !srf2) return VA_STATUS_ERROR_OPERATION_FAILED; if (dpy1 == dpy2) { *srf2 = srf1; return VA_STATUS_SUCCESS; } AcquireCtx* ctx; unsigned long handle=0; VAStatus va_res; VASurfaceAttrib attribs[2]; VASurfaceAttribExternalBuffers extsrf; uint32_t memtype = VA_SURFACE_ATTRIB_MEM_TYPE_DRM_PRIME; MSDK_ZERO_MEMORY(attribs); MSDK_ZERO_MEMORY(extsrf); extsrf.num_buffers = 1; extsrf.buffers = &handle; attribs[0].type = (VASurfaceAttribType)VASurfaceAttribMemoryType; attribs[0].flags = VA_SURFACE_ATTRIB_SETTABLE; attribs[0].value.type = VAGenericValueTypeInteger; attribs[0].value.value.i = memtype; attribs[1].type = (VASurfaceAttribType)VASurfaceAttribExternalBufferDescriptor; attribs[1].flags = VA_SURFACE_ATTRIB_SETTABLE; attribs[1].value.type = VAGenericValueTypePointer; attribs[1].value.value.p = &extsrf; ctx = (AcquireCtx*)calloc(1, sizeof(AcquireCtx)); if (!ctx) return VA_STATUS_ERROR_OPERATION_FAILED; va_res = m_libva.vaDeriveImage(dpy1, srf1, &ctx->image); if (VA_STATUS_SUCCESS != va_res) { free(ctx); return va_res; } va_res = m_fnVaGetSurfaceHandle(dpy1, &srf1, &ctx->fd); if (VA_STATUS_SUCCESS != va_res) { m_libva.vaDestroyImage(dpy1, ctx->image.image_id); free(ctx); return va_res; } extsrf.width = ctx->image.width; extsrf.height = ctx->image.height; extsrf.num_planes = ctx->image.num_planes; extsrf.pixel_format = ctx->image.format.fourcc; for (int i=0; i < 3; ++i) { extsrf.pitches[i] = ctx->image.pitches[i]; extsrf.offsets[i] = ctx->image.offsets[i]; } extsrf.data_size = ctx->image.data_size; extsrf.flags = memtype; extsrf.buffers[0] = ctx->fd; va_res = m_libva.vaCreateSurfaces(dpy2, VA_RT_FORMAT_YUV420, extsrf.width, extsrf.height, srf2, 1, attribs, 2); if (VA_STATUS_SUCCESS != va_res) { m_libva.vaDestroyImage(dpy1, ctx->image.image_id); free(ctx); return va_res; } *pctx = ctx; return VA_STATUS_SUCCESS; }