// Submits one frame (CUDA device memory, NV12) to the hardware encoder, or
// drains the encoder when bFlush is set.
//
// @param pEncodeFrame  frame descriptor (device pointer, pitch, dimensions);
//                      must be non-NULL unless bFlush is true
// @param picType       picture structure (frame / field order) for this frame
// @param bFlush        when true, flush pending output and return immediately
// @return NV_ENC_SUCCESS, or the failing NVENC status code
NVENCSTATUS VideoEncoder::EncodeFrame(EncodeFrameConfig *pEncodeFrame, NV_ENC_PIC_STRUCT picType, bool bFlush)
{
    NVENCSTATUS nvStatus = NV_ENC_SUCCESS;

    // A flush request carries no frame data: drain the encoder and return.
    if (bFlush)
    {
        FlushEncoder();
        return NV_ENC_SUCCESS;
    }

    assert(pEncodeFrame);

    // Grab a free buffer from the ring. If none is free, retire the oldest
    // in-flight frame (ProcessOutput blocks on its completion) to recycle
    // its slot, then try again.
    EncodeBuffer *pEncodeBuffer = m_EncodeBufferQueue.GetAvailable();
    if (!pEncodeBuffer)
    {
        pEncodeBuffer = m_EncodeBufferQueue.GetPending();
        if (pEncodeBuffer)
        {
            m_pNvHWEncoder->ProcessOutput(pEncodeBuffer);
            // UnMap the input buffer after frame done
            if (pEncodeBuffer->stInputBfr.hInputSurface)
            {
                nvStatus = m_pNvHWEncoder->NvEncUnmapInputResource(pEncodeBuffer->stInputBfr.hInputSurface);
                pEncodeBuffer->stInputBfr.hInputSurface = NULL;
            }
        }
        pEncodeBuffer = m_EncodeBufferQueue.GetAvailable();
        // Still nothing: the queue is empty or misconfigured. Report the
        // failure instead of dereferencing NULL below.
        if (!pEncodeBuffer)
        {
            PRINTERR("No encode buffer available\n");
            return NV_ENC_ERR_OUT_OF_MEMORY;
        }
    }

    // Encode width and height expected by the pre-registered input buffer.
    unsigned int dwWidth  = pEncodeBuffer->stInputBfr.dwWidth;
    unsigned int dwHeight = pEncodeBuffer->stInputBfr.dwHeight;

    // Device-to-device copy of the NV12 frame into the encoder input buffer.
    cuvidCtxLock(m_ctxLock, 0);
    assert(pEncodeFrame->width == dwWidth && pEncodeFrame->height == dwHeight);

    CUDA_MEMCPY2D memcpy2D  = {0};
    memcpy2D.srcMemoryType  = CU_MEMORYTYPE_DEVICE;
    memcpy2D.srcDevice      = pEncodeFrame->dptr;
    memcpy2D.srcPitch       = pEncodeFrame->pitch;
    memcpy2D.dstMemoryType  = CU_MEMORYTYPE_DEVICE;
    memcpy2D.dstDevice      = (CUdeviceptr)pEncodeBuffer->stInputBfr.pNV12devPtr;
    memcpy2D.dstPitch       = pEncodeBuffer->stInputBfr.uNV12Stride;
    memcpy2D.WidthInBytes   = dwWidth;          // NV12: 1 byte/pixel per row
    memcpy2D.Height         = dwHeight*3/2;     // luma plane + half-height chroma
    // NOTE(review): if __cu() returns early on error, the ctx lock leaks —
    // the macro's error behavior is defined elsewhere; confirm it aborts or
    // falls through rather than returning.
    __cu(cuMemcpy2D(&memcpy2D));

    cuvidCtxUnlock(m_ctxLock, 0);

    // Map the registered CUDA resource as an NVENC input surface.
    nvStatus = m_pNvHWEncoder->NvEncMapInputResource(pEncodeBuffer->stInputBfr.nvRegisteredResource, &pEncodeBuffer->stInputBfr.hInputSurface);
    if (nvStatus != NV_ENC_SUCCESS)
    {
        PRINTERR("Failed to Map input buffer %p\n", pEncodeBuffer->stInputBfr.hInputSurface);
        return nvStatus;
    }

    // Propagate encode failures to the caller (the original discarded this
    // status and always reported success); only count successfully
    // submitted frames.
    nvStatus = m_pNvHWEncoder->NvEncEncodeFrame(pEncodeBuffer, NULL, pEncodeFrame->width, pEncodeFrame->height, picType);
    if (nvStatus != NV_ENC_SUCCESS)
    {
        PRINTERR("Failed to encode frame\n");
        return nvStatus;
    }
    m_iEncodedFrames++;

    return NV_ENC_SUCCESS;
}
// Example #2
// 0
// _______________________________________________________________________________________
//
// Tears down the file object: flushes any buffered encoder output, closes
// the converter and (if owned) the underlying audio file, then frees the
// I/O and bookkeeping buffers. Idempotent — returns immediately if already
// closed. Teardown order matters: flush before closing the converter, and
// close the file before releasing the buffers it may reference.
void	CAAudioFile::Close()
{
	LOG_FUNCTION("CAAudioFile::Close", NULL, NULL);
	// Already closed: nothing to do (makes repeated Close() calls safe).
	if (mMode == kClosed)
		return;
	// Only a writer has pending encoder output to drain.
	if (mMode == kWriting)
		FlushEncoder();
	CloseConverter();
	// Close the file handle only if this object opened it (mOwnOpenFile);
	// a caller-supplied AudioFileID is left open for the caller to close.
	if (mAudioFile != 0 && mOwnOpenFile) {
		AudioFileClose(mAudioFile);
		mAudioFile = 0;
	}
	// The I/O buffer is freed only when we allocated it; a client-owned
	// buffer (mClientOwnsIOBuffer) is the client's to release.
	if (!mClientOwnsIOBuffer) {
		delete[] (Byte *)mIOBufferList.mBuffers[0].mData;
		mIOBufferList.mBuffers[0].mData = NULL;
		mIOBufferList.mBuffers[0].mDataByteSize = 0;
	}
	// Release remaining owned allocations and null the pointers so a later
	// Close() (or the destructor) cannot double-free.
	delete[] mPacketDescs;	mPacketDescs = NULL;	mNumPacketDescs = 0;
	delete[] mMagicCookie;	mMagicCookie = NULL;
	delete mWriteBufferList;	mWriteBufferList = NULL;
	mMode = kClosed;
}