mfxStatus CRendererPipeline::AllocFrames()
{
	// Asks the encoder how many surfaces it needs for these parameters,
	// allocates that many video-memory frames through m_pMFXAllocator, and
	// wires each mfxFrameSurface1 in m_pEncSurfaces to an allocator mid.
	// Returns MFX_ERR_NONE on success, or the failing MFX status.
	mfxStatus sts = MFX_ERR_NONE;
	mfxFrameAllocRequest EncRequest;

	mfxU16 nEncSurfNum = 0; // number of surfaces for encoder

	MSDK_ZERO_MEMORY(EncRequest);

	// Temporary encoder instance used only for QueryIOSurf; deleted below.
	MFXVideoENCODE* pmfxENC = new MFXVideoENCODE(m_mfxSession);
	MSDK_CHECK_POINTER(pmfxENC, MFX_ERR_MEMORY_ALLOC);

	// Mirror the parameters the real encoder will be initialized with, so
	// the surface count reported by QueryIOSurf is accurate.
	mfxVideoParam mfxEncParams;
	MSDK_ZERO_MEMORY(mfxEncParams);
	mfxEncParams.mfx.CodecId = MFX_CODEC_AVC;
	mfxEncParams.mfx.TargetUsage = MFX_TARGETUSAGE_BALANCED;
	mfxEncParams.mfx.TargetKbps = 1024; // in Kbps
	mfxEncParams.mfx.RateControlMethod = MFX_RATECONTROL_CBR;
	mfxEncParams.mfx.NumSlice = 0;
	ConvertFrameRate(60, &mfxEncParams.mfx.FrameInfo.FrameRateExtN, &mfxEncParams.mfx.FrameInfo.FrameRateExtD);
	mfxEncParams.mfx.EncodedOrder = 0; // binary flag, 0 signals encoder to take frames in display order
	mfxEncParams.IOPattern = MFX_IOPATTERN_IN_VIDEO_MEMORY;

	// frame info parameters
	mfxEncParams.mfx.FrameInfo.FourCC = (m_bUsedNV12 ? MFX_FOURCC_NV12 : MFX_FOURCC_YV12);
	mfxEncParams.mfx.FrameInfo.ChromaFormat = MFX_CHROMAFORMAT_YUV420;
	mfxEncParams.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;

	// set frame size and crops
	// width must be a multiple of 16
	// height must be a multiple of 16 in case of frame picture and a multiple of 32 in case of field picture
	// Round the surface dimensions up to the required 16 alignment instead
	// of trusting the caller-supplied values; the crop rectangle below
	// preserves the true picture size.
	mfxEncParams.mfx.FrameInfo.Width = (mfxU16)((m_nWidth + 15) & ~15);
	mfxEncParams.mfx.FrameInfo.Height = (mfxU16)((m_nHeight + 15) & ~15);

	mfxEncParams.mfx.FrameInfo.CropX = 0;
	mfxEncParams.mfx.FrameInfo.CropY = 0;
	mfxEncParams.mfx.FrameInfo.CropW = (m_nWidth);
	mfxEncParams.mfx.FrameInfo.CropH = (m_nHeight);

	mfxEncParams.AsyncDepth = 1;
	sts = pmfxENC->QueryIOSurf(&mfxEncParams, &EncRequest);
	MSDK_SAFE_DELETE(pmfxENC);
	// This status was previously ignored; a failed query would leave
	// EncRequest zeroed and the allocation below meaningless.
	MSDK_CHECK_RESULT(sts, MFX_ERR_NONE, sts);

	// QueryIOSurf reports how many surfaces are required to produce at
	// least one output; one extra surface at input yields one extra output.
	if (EncRequest.NumFrameSuggested < mfxEncParams.AsyncDepth)
		return MFX_ERR_MEMORY_ALLOC;

	// The number of surfaces shared by vpp output and encode input.
	nEncSurfNum = EncRequest.NumFrameSuggested;

	// prepare allocation requests
	EncRequest.NumFrameSuggested = EncRequest.NumFrameMin = nEncSurfNum;
	MSDK_MEMCPY_VAR(EncRequest.Info, &(mfxEncParams.mfx.FrameInfo), sizeof(mfxFrameInfo));
	// alloc frames for encoder
	sts = m_pMFXAllocator->Alloc(m_pMFXAllocator->pthis, &EncRequest, &m_EncResponse);
	MSDK_CHECK_RESULT(sts, MFX_ERR_NONE, sts);

	// prepare mfxFrameSurface1 array for encoder
	m_pEncSurfaces = new mfxFrameSurface1[m_EncResponse.NumFrameActual];
	MSDK_CHECK_POINTER(m_pEncSurfaces, MFX_ERR_MEMORY_ALLOC);

	for (int i = 0; i < m_EncResponse.NumFrameActual; i++)
	{
		MSDK_ZERO_MEMORY(m_pEncSurfaces[i]);
		MSDK_MEMCPY_VAR(m_pEncSurfaces[i].Info, &(mfxEncParams.mfx.FrameInfo), sizeof(mfxFrameInfo));

		// Video-memory pipeline: the allocator mid is the surface handle;
		// no system-memory data pointers are set here.
		m_pEncSurfaces[i].Data.MemId = m_EncResponse.mids[i];
	}

	return MFX_ERR_NONE;
}
// ===== Example #2 =====
    /// Constructs the QSV H.264 encoder: selects a working Media SDK
    /// implementation/version, configures encode parameters, then
    /// pre-allocates per-task bitstream buffers and a pool of NV12 frames.
    /// NOTE(review): `quality` and `preset` are accepted but never read in
    /// this body — confirm they are intentionally unused.
    QSVEncoder(int fps_, int width, int height, int quality, CTSTR preset, bool bUse444, int maxBitrate, int bufferSize, bool bUseCFR_, bool bDupeFrames_)
        : enc(nullptr)
    {
        Log(TEXT("------------------------------------------"));
        // Try each candidate implementation from validImpl (declared
        // elsewhere) until a session initializes successfully.
        for(int i = 0; i < sizeof(validImpl)/sizeof(validImpl[0]); i++)
        {
            mfxIMPL impl = validImpl[i];
            ver = version;
            mfxStatus result = MFX_ERR_UNKNOWN;
            // Fall back through API minor versions: try 1.6, then 1.4.
            for(ver.Minor = 6; ver.Minor >= 4; ver.Minor -= 2)
            {
                result = session.Init(impl, &ver);
                if(result == MFX_ERR_NONE)
                {
                    Log(TEXT("QSV version %u.%u using %s"), ver.Major, ver.Minor, implStr[impl]);
                    break;
                }
            }
            if(result == MFX_ERR_NONE)
                break;
        }
        // NOTE(review): if every Init attempt failed, execution still falls
        // through and uses `session` below — confirm failure is detected
        // elsewhere (e.g. by a later MFX call returning an error).

        session.SetPriority(MFX_PRIORITY_HIGH);

        fps = fps_;

        bUseCBR = AppConfig->GetInt(TEXT("Video Encoding"), TEXT("UseCBR")) != 0;
        bUseCFR = bUseCFR_;
        bDupeFrames = bDupeFrames_;

        memset(&params, 0, sizeof(params));
        //params.AsyncDepth = 0;
        params.mfx.CodecId = MFX_CODEC_AVC;
        params.mfx.TargetUsage = MFX_TARGETUSAGE_BEST_QUALITY;//SPEED;
        // Target 90% of the cap, leaving headroom up to MaxKbps.
        params.mfx.TargetKbps = (mfxU16)(maxBitrate*0.9);
        params.mfx.MaxKbps = maxBitrate;
        //params.mfx.InitialDelayInKB = 1;
        //params.mfx.GopRefDist = 1;
        //params.mfx.NumRefFrame = 0;
        // GOP layout: ~61-frame GOPs, B-frame distance 3, strict GOP
        // pattern, IDR every 2nd I-frame, single slice per frame.
        params.mfx.GopPicSize = 61;
        params.mfx.GopRefDist = 3;
        params.mfx.GopOptFlag = MFX_GOP_STRICT;
        params.mfx.IdrInterval = 2;
        params.mfx.NumSlice = 1;

        params.mfx.RateControlMethod = bUseCBR ? MFX_RATECONTROL_CBR : MFX_RATECONTROL_VBR;
        params.IOPattern = MFX_IOPATTERN_IN_SYSTEM_MEMORY;

        auto& fi = params.mfx.FrameInfo;
        ConvertFrameRate(fps, fi.FrameRateExtN, fi.FrameRateExtD);

        fi.FourCC = MFX_FOURCC_NV12;
        fi.ChromaFormat = bUse444 ? MFX_CHROMAFORMAT_YUV444 : MFX_CHROMAFORMAT_YUV420;
        fi.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;

        // Surface dimensions must be 16-aligned; crops keep the real size.
        fi.Width = ALIGN16(width);
        fi.Height = ALIGN16(height);

        fi.CropX = 0;
        fi.CropY = 0;
        fi.CropW = width;
        fi.CropH = height;

        this->width  = width;
        this->height = height;

        enc.reset(new MFXVideoENCODE(session));
        enc->Close();   // ensure a clean state before QueryIOSurf/Init

        mfxFrameAllocRequest req;
        memset(&req, 0, sizeof(req));
        enc->QueryIOSurf(&params, &req);

        enc->Init(&params);

        // Read back the parameters the encoder actually applied (it may
        // adjust e.g. BufferSizeInKB).
        decltype(params) query;
        memcpy(&query, &params, sizeof(params));
        enc->GetVideoParam(&query);

        // Surface pool: at least 6, or what the encoder suggested plus the
        // configured async depth.
        unsigned num_surf = max(6, req.NumFrameSuggested + params.AsyncDepth);

        encode_tasks.SetSize(num_surf);

        // Per-task bitstream size: the larger of QSV's suggestion (KB ->
        // bytes) and the configured buffer (kilobits -> bytes).
        const unsigned bs_size = max(query.mfx.BufferSizeInKB*1000, bufferSize*1024/8);
        bs_buff.SetSize(bs_size * encode_tasks.Num() + 31);
        params.mfx.BufferSizeInKB = bs_size/1000;

        // 32-byte-align the start of the shared bitstream buffer and carve
        // one bs_size slice per task out of it.
        mfxU8* bs_start = (mfxU8*)(((size_t)bs_buff.Array() + 31)/32*32);
        for(unsigned i = 0; i < encode_tasks.Num(); i++)
        {
            encode_tasks[i].sp = nullptr;

            mfxFrameSurface1& surf = encode_tasks[i].surf;
            memset(&surf, 0, sizeof(mfxFrameSurface1));
            memcpy(&surf.Info, &params.mfx.FrameInfo, sizeof(params.mfx.FrameInfo));
            
            mfxBitstream& bs = encode_tasks[i].bs;
            memset(&bs, 0, sizeof(mfxBitstream));
            bs.Data = bs_start + i*bs_size;
            bs.MaxLength = bs_size;

            idle_tasks << i;
        }

        frames.SetSize(num_surf+3); //+NUM_OUT_BUFFERS

        // NOTE(review): the UV plane is sized as a full Width*Height plane
        // (same as luma); NV12 4:2:0 needs only half that. Presumably
        // oversized deliberately (e.g. to also cover 4:4:4) — confirm.
        const unsigned lum_channel_size = fi.Width*fi.Height,
                       uv_channel_size = fi.Width*fi.Height,
                       frame_size = lum_channel_size + uv_channel_size;
        frame_buff.SetSize(frame_size * frames.Num() + 15);

        // 16-byte-align the frame pool and zero it once up front.
        mfxU8* frame_start = (mfxU8*)(((size_t)frame_buff.Array() + 15)/16*16);
        memset(frame_start, 0, frame_size * frames.Num());
        for(unsigned i = 0; i < frames.Num(); i++)
        {
            mfxFrameData& frame = frames[i];
            memset(&frame, 0, sizeof(mfxFrameData));
            frame.Y = frame_start + i * frame_size;
            frame.UV = frame_start + i * frame_size + lum_channel_size;
            // NV12 interleaves U and V; V is a byte-offset view into UV.
            frame.V = frame.UV + 1;
            frame.Pitch = fi.Width;
        }

        Log(TEXT("Using %u encode tasks"), encode_tasks.Num());
        Log(TEXT("Buffer size: %u configured, %u suggested by QSV; using %u"),
            bufferSize, query.mfx.BufferSizeInKB*1000*8/1024, params.mfx.BufferSizeInKB*1000*8/1024);

        Log(TEXT("------------------------------------------"));
        Log(TEXT("%s"), GetInfoString().Array());
        Log(TEXT("------------------------------------------"));

        // Default per-frame control: request an IDR reference I-frame
        // (presumably applied when a keyframe is forced — confirm usage).
        memset(&ctrl, 0, sizeof(ctrl));
        ctrl.FrameType = MFX_FRAMETYPE_I | MFX_FRAMETYPE_REF | MFX_FRAMETYPE_IDR;

        deferredFrames = 0;

        // Deliberately disabled: the `false &&` short-circuits, so this is
        // always false regardless of the API version.
        bUsingDecodeTimestamp = false && ver.Minor >= 6;

        // Prime the SPS/PPS header packet once at construction.
        DataPacket packet;
        GetHeaders(packet);
    }
// ===== Example #3 =====
    /// Constructs the QSV H.264 encoder (simpler variant): selects an
    /// implementation, configures synchronous (AsyncDepth=1) encoding with
    /// a single shared surface/bitstream, and initializes the encoder.
    /// NOTE(review): `quality` and `preset` are accepted but never read in
    /// this body — confirm they are intentionally unused.
    QSVEncoder(int fps_, int width, int height, int quality, CTSTR preset, bool bUse444, int maxBitrate, int bufferSize, bool bUseCFR_, bool bDupeFrames_)
        : enc(nullptr)
    {
        Log(TEXT("------------------------------------------"));
        // Try each candidate implementation from validImpl (declared
        // elsewhere) until a session initializes successfully.
        for(int i = 0; i < sizeof(validImpl)/sizeof(validImpl[0]); i++)
        {
            mfxIMPL impl = validImpl[i];
            mfxVersion ver = version;
            auto result = session.Init(impl, &ver);
            if(result == MFX_ERR_NONE)
            {
                Log(TEXT("QSV version %u.%u using %s"), ver.Major, ver.Minor, implStr[impl]);
                break;
            }
        }
        // NOTE(review): if every Init attempt failed, execution still falls
        // through and uses `session` below — confirm failure is surfaced
        // by a later MFX call.

        fps = fps_;

        bUseCBR = AppConfig->GetInt(TEXT("Video Encoding"), TEXT("UseCBR")) != 0;
        bUseCFR = bUseCFR_;
        bDupeFrames = bDupeFrames_;

        memset(&params, 0, sizeof(params));
        // Depth 1 => fully synchronous: one frame in flight at a time.
        params.AsyncDepth = 1;
        params.mfx.CodecId = MFX_CODEC_AVC;
        params.mfx.TargetUsage = MFX_TARGETUSAGE_BEST_QUALITY;
        params.mfx.TargetKbps = maxBitrate;
        params.mfx.MaxKbps = maxBitrate;
        // Minimal initial buffering delay before the first frame is output.
        params.mfx.InitialDelayInKB = 1;
        //params.mfx.GopRefDist = 1;
        //params.mfx.NumRefFrame = 0;
        params.mfx.RateControlMethod = bUseCBR ? MFX_RATECONTROL_CBR : MFX_RATECONTROL_VBR;
        params.IOPattern = MFX_IOPATTERN_IN_SYSTEM_MEMORY;

        auto& fi = params.mfx.FrameInfo;
        ConvertFrameRate(fps, fi.FrameRateExtN, fi.FrameRateExtD);

        fi.FourCC = MFX_FOURCC_NV12;
        fi.ChromaFormat = bUse444 ? MFX_CHROMAFORMAT_YUV444 : MFX_CHROMAFORMAT_YUV420;
        fi.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;

        // Surface dimensions must be 16-aligned; crops keep the real size.
        fi.Width = ALIGN16(width);
        fi.Height = ALIGN16(height);

        fi.CropX = 0;
        fi.CropY = 0;
        fi.CropW = width;
        fi.CropH = height;

        this->width  = width;
        this->height = height;

        enc.reset(new MFXVideoENCODE(session));
        enc->Close();   // ensure a clean state before Init

        // NOTE(review): `result` is never checked — an Init failure is
        // silently ignored here.
        auto result = enc->Init(&params);

        // Single encode surface, described by the same FrameInfo as params.
        memset(&enc_surf, 0, sizeof(enc_surf));
        memcpy(&enc_surf.Info, &params.mfx.FrameInfo, sizeof(enc_surf.Info));

        // Read back the parameters the encoder actually applied (it may
        // adjust e.g. BufferSizeInKB).
        decltype(params) query;
        memcpy(&query, &params, sizeof(params));
        enc->GetVideoParam(&query);

        // Bitstream size: the larger of QSV's suggestion (KB -> bytes) and
        // the configured buffer (kilobits -> bytes); 32-byte-aligned start.
        unsigned size = max(query.mfx.BufferSizeInKB*1000, bufferSize*1024/8);
        bs_buff.SetSize(size+31);//.resize(size+31);
        bs.Data = (mfxU8*)(((size_t)bs_buff.Array() + 31) / 32 * 32);
        bs.MaxLength = size;
        params.mfx.BufferSizeInKB = size/1000;
        Log(TEXT("Buffer size: %u configured, %u suggested by QSV; using %u"),
            bufferSize, query.mfx.BufferSizeInKB*1000*8/1024, size*8/1024);

        Log(TEXT("------------------------------------------"));
        Log(TEXT("%s"), GetInfoString().Array());
        Log(TEXT("------------------------------------------"));

        // Default per-frame control: request an IDR reference I-frame
        // (presumably applied when a keyframe is forced — confirm usage).
        memset(&ctrl, 0, sizeof(ctrl));
        ctrl.FrameType = MFX_FRAMETYPE_I | MFX_FRAMETYPE_REF | MFX_FRAMETYPE_IDR;

        // Prime the SPS/PPS header packet once at construction.
        DataPacket packet;
        GetHeaders(packet);
    }